Commit 98603e37 authored by Bryce Hepner

Many more tests with numbers, still lacking LZW

parent 9d6dc3a6
...@@ -7,4 +7,5 @@ attic
 /compress_experiment.ipynb
 *.txt
 !backup.txt
 *.tiff
+*.png
\ No newline at end of file
{
"cells": [
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
"from WorkingPyDemo import *\n",
"from time import time\n",
"from scipy import misc\n",
"import imageio\n",
"import png"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"262\n",
"0.39837348558217806\n"
]
}
],
"source": [
"scenes = file_extractor(folder_name)\n",
"images = image_extractor(scenes)\n",
"print(len(images))\n",
"newnamesforlater = []\n",
"file_sizes_new = []\n",
"file_sizes_old = []\n",
"list_dic = np.load(\"first_dic.npy\", allow_pickle=\"TRUE\")\n",
"bins = [21,32,48]\n",
"for i in range(len(images)):\n",
"\n",
" if images[i][-5:] == \".tiff\":\n",
" newname = images[i][:-5]\n",
" else:\n",
" newname = images[i][:-4]\n",
" newnamesforlater.append(newname + \"_Compressed.txt\")\n",
" with open(newname + \"_Compressed.txt\", 'wb') as f:\n",
" f.write(inletters)\n",
" file_sizes_new.append((os.path.getsize(newname + \"_Compressed.txt\")))\n",
" file_sizes_old.append((os.path.getsize(images[i])))\n",
"file_sizes_new.append(os.path.getsize(\"first_dic.npy\"))\n",
"print(np.sum(file_sizes_new)/np.sum(file_sizes_old))"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.6104417990437042\n"
]
}
],
"source": [
"pngsizes = []\n",
"for i, filename in enumerate(images):\n",
" newimage = Image.open(filename)\n",
" newimage = np.array(newimage)\n",
" newimage = newimage[1:]\n",
" with open(newnamesforlater[i][:-4] + \".png\", 'wb') as f:\n",
" writer = png.Writer(newimage.shape[1], newimage.shape[0], greyscale=True, bitdepth=16)\n",
" writer.write(f, newimage)\n",
" # imageio.imwrite(newnamesforlater[i][:-4] + \".png\", newimage)\n",
" # newimage.close()\n",
" pngsizes.append(os.path.getsize(newnamesforlater[i][:-4] + \".png\"))\n",
"print(np.sum(pngsizes)/np.sum(file_sizes_old))\n"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.5676821409714287\n"
]
}
],
"source": [
"print(os.path.getsize(\"1626032610_393963_0.xcf\")/os.path.getsize(\"images/1626032610_393963/1626032610_393963_0.tiff\"))"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.4855910784741866\n"
]
}
],
"source": [
"print(os.path.getsize(\"1626032610_393963_0.png\")/os.path.getsize(\"images/1626032610_393963/1626032610_393963_0.tiff\"))"
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {},
"outputs": [],
"source": [
"for i, filename in enumerate(images):\n",
" original = Image.open(filename)\n",
" compressed = Image.open(newnamesforlater[i][:-4] + \".png\")\n",
" original = np.array(original)[1:]\n",
" compressed = np.array(compressed)\n",
" if not np.allclose(original, compressed):\n",
" print(i)"
]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.34740136372336333\n"
]
}
],
"source": [
"print((np.sum(pngsizes) - np.sum(file_sizes_new))/np.sum(pngsizes))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"interpreter": {
"hash": "916dbcbb3f70747c44a77c7bcd40155683ae19c65e1c03b4aa3499c5328201f1"
},
"kernelspec": {
"display_name": "Python 3.8.10 64-bit",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}
...@@ -529,7 +529,7 @@ if __name__ == "__main__":
    print(np.sum(file_sizes_new)/np.sum(file_sizes_old))
    # list_dic = np.load("first_dic.npy", allow_pickle="TRUE")
    # bins = [21,32,48]
    # starttime = time()
    # for i,item in enumerate(newnamesforlater[0:6]):
    #     image, new_error, diff = huffman(images[i], 4, False)
    #     encoded_string2 = bytes_to_bitstring(read_from_file(item))
...
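As a standalone reference for the PNG baseline measured in the notebook, the following is a minimal sketch that re-encodes one 16-bit grayscale TIFF as a 16-bit grayscale PNG with pypng and reports the size ratio. The function name tiff_to_png16, the example output path, and the explicit numpy/PIL imports are illustrative assumptions; in the notebook those names come in through from WorkingPyDemo import *, and the reported ~0.61 ratio is computed over the totals of the whole image set rather than a single file.

import os
import numpy as np
import png                      # pypng
from PIL import Image

def tiff_to_png16(tiff_path, png_path, drop_first_row=True):
    # Load the TIFF; as in the notebook, the first row is dropped before re-encoding.
    arr = np.array(Image.open(tiff_path))
    if drop_first_row:
        arr = arr[1:]
    # Write a 16-bit grayscale PNG with pypng and return its size on disk.
    with open(png_path, "wb") as f:
        writer = png.Writer(arr.shape[1], arr.shape[0], greyscale=True, bitdepth=16)
        writer.write(f, arr)
    return os.path.getsize(png_path)

# Example usage with a path that appears in the notebook; the notebook reports ~0.61 over all images.
tiff = "images/1626032610_393963/1626032610_393963_0.tiff"
print(tiff_to_png16(tiff, "example_16bit.png") / os.path.getsize(tiff))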