Commit 625e9d15 authored by Bryce Hepner

Made some changes, have to rerun everything

parent 00cfa297
@@ -2,7 +2,7 @@
 "cells": [
 {
 "cell_type": "code",
-"execution_count": 1,
+"execution_count": 2,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -15,7 +15,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 15,
+"execution_count": 19,
 "metadata": {},
 "outputs": [
 {
@@ -51,12 +51,29 @@
 " file_sizes_old.append((os.path.getsize(images[i])))\n",
 "file_sizes_new.append(os.path.getsize(\"first_dic.npy\"))\n",
 "print(len(newnamesforlater))\n",
-"print(np.sum(file_sizes_new)/np.sum(file_sizes_old))"
+"print(np.sum(file_sizes_new)/np.sum(file_sizes_old))\n"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 16,
+"execution_count": 21,
+"metadata": {},
+"outputs": [
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
"[0, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 260942, 231302]\n"
+]
+}
+],
+"source": [
+"print(file_sizes_new)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 4,
 "metadata": {},
 "outputs": [
 {
@@ -70,12 +87,12 @@
 "source": [
 "pngsizes = []\n",
 "for i, filename in enumerate(images):\n",
-" newimage = Image.open(filename)\n",
-" newimage = np.array(newimage)\n",
-" newimage = newimage[1:]\n",
-" with open(newnamesforlater[i][:-4] + \".png\", 'wb') as f:\n",
-" writer = png.Writer(newimage.shape[1], newimage.shape[0], greyscale=True, bitdepth=16)\n",
-" writer.write(f, newimage)\n",
+" # newimage = Image.open(filename)\n",
+" # newimage = np.array(newimage)\n",
+" # newimage = newimage[1:]\n",
+" # with open(newnamesforlater[i][:-4] + \".png\", 'wb') as f:\n",
+" # writer = png.Writer(newimage.shape[1], newimage.shape[0], greyscale=True, bitdepth=16)\n",
+" # writer.write(f, newimage)\n",
 " # imageio.imwrite(newnamesforlater[i][:-4] + \".png\", newimage)\n",
 " # newimage.close()\n",
 " pngsizes.append(os.path.getsize(newnamesforlater[i][:-4] + \".png\"))\n",
@@ -118,7 +135,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 17,
+"execution_count": 5,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -157,7 +174,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 19,
+"execution_count": 6,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -167,14 +184,14 @@
 },
 {
 "cell_type": "code",
-"execution_count": 20,
+"execution_count": 9,
 "metadata": {},
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"0.6185872808807661\n"
+"0.6177078151903203\n"
 ]
 }
 ],
@@ -182,6 +199,9 @@
 "lwz_sizes = []\n",
 "for i, filename in enumerate(images):\n",
 " newimage = Image.open(filename)\n",
+" newimage = np.array(newimage)\n",
+" newimage = newimage[1:]\n",
+" newimage = Image.fromarray(newimage)\n",
 " newimage.save(newnamesforlater[i][:-4]+ \"lzw\" + \".tiff\", compression='tiff_lzw', tiffinfo={317: 2})\n",
 "\n",
 " lwz_sizes.append(os.path.getsize(newnamesforlater[i][:-4]+ \"lzw\" + \".tiff\"))\n",
@@ -190,7 +210,24 @@
 },
 {
 "cell_type": "code",
-"execution_count": 22,
+"execution_count": 10,
+"metadata": {},
+"outputs": [
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"0.3575310312509913\n"
+]
+}
+],
+"source": [
+"print((np.sum(lwz_sizes) - np.sum(file_sizes_new))/np.sum(lwz_sizes))"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 18,
 "metadata": {},
 "outputs": [
 {
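Note: the new cell expresses the gain over LZW as a fraction of the LZW total. A quick restatement of the printed figure (constant copied from the committed output above):

lzw_saving = 0.3575310312509913   # (np.sum(lwz_sizes) - np.sum(file_sizes_new)) / np.sum(lwz_sizes)
custom_over_lzw = 1 - lzw_saving  # ~0.6425: the custom files total roughly 64% of the LZW total
print(custom_over_lzw)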
@@ -240,15 +277,18 @@
 },
 {
 "cell_type": "code",
-"execution_count": 28,
+"execution_count": 1,
 "metadata": {},
 "outputs": [
 {
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"1178\n",
-"1178\n"
+"ename": "NameError",
+"evalue": "name 'all_image_extractor' is not defined",
+"output_type": "error",
+"traceback": [
+"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
+"\u001b[1;32m/home/bryce/git/master/FullTester.ipynb Cell 13'\u001b[0m in \u001b[0;36m<cell line: 1>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> <a href='vscode-notebook-cell:/home/bryce/git/master/FullTester.ipynb#ch0000012?line=0'>1</a>\u001b[0m all_files \u001b[39m=\u001b[39m all_image_extractor(scenes)\n\u001b[1;32m <a href='vscode-notebook-cell:/home/bryce/git/master/FullTester.ipynb#ch0000012?line=1'>2</a>\u001b[0m \u001b[39mprint\u001b[39m(\u001b[39mlen\u001b[39m(all_files))\n\u001b[1;32m <a href='vscode-notebook-cell:/home/bryce/git/master/FullTester.ipynb#ch0000012?line=2'>3</a>\u001b[0m \u001b[39mfor\u001b[39;00m i, item \u001b[39min\u001b[39;00m \u001b[39menumerate\u001b[39m(all_files):\n\u001b[1;32m <a href='vscode-notebook-cell:/home/bryce/git/master/FullTester.ipynb#ch0000012?line=3'>4</a>\u001b[0m \u001b[39m# print(item[-18:])\u001b[39;00m\n",
+"\u001b[0;31mNameError\u001b[0m: name 'all_image_extractor' is not defined"
 ]
 }
 ],
@@ -257,6 +297,7 @@
 "print(len(all_files))\n",
 "for i, item in enumerate(all_files):\n",
 " # print(item[-18:])\n",
+" print(item[-4:])\n",
 " if item[-4:] == \"..png\":\n",
 " os.remove(item)\n",
 "scenes = file_extractor(folder_name)\n",
...
@@ -9,6 +9,7 @@ from sklearn.neighbors import KernelDensity
 from collections import Counter
 import numpy.linalg as la
 from time import time
+from time import sleep
 import tifffile as tiff
 folder_name = "images"
 outputlocation = ""
@@ -506,11 +507,12 @@ if __name__ == "__main__":
     scenes = file_extractor(folder_name)
     images = image_extractor(scenes)
     newnamesforlater = []
-    # list_dic, bins = make_dictionary(images, 4, False)
+    list_dic, bins = make_dictionary(images, 4, False)
     file_sizes_new = []
     file_sizes_old = []
-    # list_dic = np.load("first_dic.npy", allow_pickle="TRUE")
-    # np.save("first_dic.npy", list_dic)
+    bins = [21,32,48]
+    np.save("first_dic.npy", list_dic)
     for i in range(len(images)):
         image, new_error, diff = huffman(images[i], 4, False)
         encoded_string = encoder(new_error, list_dic, diff, bins)
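Note: bins = [21,32,48] runs after make_dictionary returns, so if the learned bin edges differ from the hard-coded ones, the override is what encoder() receives. If that is unintentional, a sketch of the ordering that keeps the learned bins while still persisting the dictionary:

    list_dic, bins = make_dictionary(images, 4, False)
    np.save("first_dic.npy", list_dic)
    # bins = [21, 32, 48]  # re-enable only to force fixed bin edges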
@@ -525,16 +527,22 @@
             f.write(inletters)
         file_sizes_new.append((os.path.getsize(newname + "_Compressed.txt")))
         file_sizes_old.append((os.path.getsize(images[i])))
+        sleep(5)
+        if i % 50 == 0:
+            print(i)
+            sleep(20)
     file_sizes_new.append(os.path.getsize("first_dic.npy"))
     print(np.sum(file_sizes_new)/np.sum(file_sizes_old))
-    # list_dic = np.load("first_dic.npy", allow_pickle="TRUE")
-    # bins = [21,32,48]
+    list_dic = np.load("first_dic.npy", allow_pickle="TRUE")
+    bins = [21,32,48]
     # starttime = time()
-    # for i,item in enumerate(newnamesforlater[0:6]):
-    #     image, new_error, diff = huffman(images[i], 4, False)
-    #     encoded_string2 = bytes_to_bitstring(read_from_file(item))
-    #     reconstruct_image = decoder(encoded_string2, list_dic, bins, False)
-    #     print(np.allclose(image, reconstruct_image))
+    for i,item in enumerate(newnamesforlater[2:5]):
+        print(item)
+        image, new_error, diff = huffman(images[i], 4, False)
+        encoded_string2 = bytes_to_bitstring(read_from_file(item))
+        reconstruct_image = decoder(encoded_string2, list_dic, bins, False)
+        print(np.allclose(image, reconstruct_image))
     # text_to_tiff("images/1626033496_437803/1626033496_437803_3._Compressed.txt", list_dic, bins)
     # original_image = Image.open("images/1626033496_437803/1626033496_437803_3.tiff")
     # original_image = np.array(original_image)[1:]
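Note: the re-enabled round-trip check has an index-alignment problem: enumerate(newnamesforlater[2:5]) restarts i at 0, so huffman(images[i], ...) re-encodes images 0-2 while the decoder reads compressed files 2-4, and np.allclose can report False even when the codec is correct. Also, allow_pickle="TRUE" only works because any non-empty string is truthy; the boolean form says what is meant. A sketch with aligned indices, using only names from the script:

    list_dic = np.load("first_dic.npy", allow_pickle=True)  # boolean, not the string "TRUE"
    bins = [21, 32, 48]
    for i, item in enumerate(newnamesforlater[2:5], start=2):  # keep i aligned with item
        print(item)
        image, new_error, diff = huffman(images[i], 4, False)
        encoded_string2 = bytes_to_bitstring(read_from_file(item))
        reconstruct_image = decoder(encoded_string2, list_dic, bins, False)
        print(np.allclose(image, reconstruct_image))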
...