#!/usr/bin/env python3
# DO NOT USE
# WARNING: this is junk, made for one-time use, and may not work.
# Expected to work only with 3-character tokens in a 1024-token range.
# WARNING: shadertoy_API_key below is a PRIVATE API key.
# https://www.shadertoy.com/view/cdXyWr
# Remember to update num_elems etc. and et_a1-et_a3 (line 204+, test_tokens_uniq).
# https://www.shadertoy.com/howto
shadertoy_API_key = "Nd8jhN"
import os, json, urllib.request
import numpy as np
text_file_name = "shadertoy_file_with_ids.json"
def download_ids():
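    """Download the list of shader IDs from the Shadertoy API and cache it as
    JSON on disk; reuses the cache file when it already exists."""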
    if os.path.isfile(text_file_name):
        print("data already loaded, using existing file " + text_file_name)
        return
    print("Loading from shadertoy API...")
    url = 'https://www.shadertoy.com/api/v1/shaders?key=' + shadertoy_API_key
    with urllib.request.urlopen(url) as req:
        text_json = req.read().decode('utf-8')
    j = json.loads(text_json)
    with open(text_file_name, "w") as f:
        f.write(json.dumps(j, indent=2))
    print("original json saved to file " + text_file_name)
def local_assert(a, b):
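    # assert-like helper: print message b and exit if condition a is false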
    if not a:
        print(b)
        exit(1)
def get_unique_tokens(tokens, rangex):
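    """Split token values into (those appearing more than rangex times,
    those appearing rangex times or fewer), via numpy unique counts."""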
    a = np.array(tokens)
    u, c = np.unique(a, return_counts=True)
    dup1 = u[c > rangex]
    dup2 = u[c <= rangex]
    return dup1, dup2
def get_unique_tokens_one(tokens, rangex):
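    """Return token values that appear exactly rangex times."""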
    a = np.array(tokens)
    u, c = np.unique(a, return_counts=True)
    ret = u[c == rangex]
    return ret
def test_tokens_uniq(resize_token_3_01, resize_token_3_02, uni, uniq_3_01_all, uniq_3_02_all):
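    """Print the 3-char half-tokens that appear exactly `uni` times on the
    left/right halves of the IDs, plus their indices in the full token tables."""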
    uniq_3_01_uniq = get_unique_tokens_one(resize_token_3_01, uni)
    uniq_3_02_uniq = get_unique_tokens_one(resize_token_3_02, uni)
    print("")
    print("test_tokens_uniq " + str(uni))
    print("")
    print("3 char token:")
    print("Single left - " + str(len(uniq_3_01_uniq)))
    print("Single right - " + str(len(uniq_3_02_uniq)))
    print("uniq single left:")
    print(uniq_3_01_uniq)
    print("uniq single right:")
    print(uniq_3_02_uniq)
    print("INDEX:")
    tidx = []
    for a in range(len(uniq_3_01_uniq)):
        for b in range(len(uniq_3_01_all)):
            if uniq_3_01_uniq[a] == uniq_3_01_all[b]:
                tidx.append(b)
    print("uniq single left:")
    print(tidx)
    tidx = []
    for a in range(len(uniq_3_02_uniq)):
        for b in range(len(uniq_3_02_all)):
            if uniq_3_02_uniq[a] == uniq_3_02_all[b]:
                tidx.append(b)
    print("uniq single right:")
    print(tidx)
    print("")
    print("DONE test_tokens_uniq")
    print("")
def find_my_tokens(uniq_3_01_all, uniq_3_02_all):
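    """Print the indices of the author's own shader IDs (hardcoded below) in
    the left/right half-token tables."""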
    my_lst = ["NslGRN", "sldGDf", "3dlSzs", "NljfzV", "tsfGW4", "wldcW2", "4lKcDD", "3syXDD", "wdS3D3", "WdBGzc", "wt2fWw",
              "lttfR8", "4dGBWy", "4sdBDn", "NlScDz", "cdXyWr"]
    print("")
    print("find_my_tokens:")
    tidx = []
    for a in range(len(my_lst)):
        for b in range(len(uniq_3_01_all)):
            if my_lst[a][0:3] == uniq_3_01_all[b]:
                tidx.append(b)
    print("my shaders left:")
    print(tidx)
    tidx = []
    for a in range(len(my_lst)):
        for b in range(len(uniq_3_02_all)):
            if my_lst[a][3:6] == uniq_3_02_all[b]:
                tidx.append(b)
    print("my shaders right:")
    print(tidx)
    print("")
def process_json():
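    """Load the cached shader list, split every 6-char ID into sub-tokens of
    length 1..5, print uniqueness statistics for each split, then pass the
    3+3 split to tokens_to_bin_3w() for compression."""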
print("WARNING - THIS IS JUNK, MADE FOR ONE USE MAY NOT WORK. Expected to work only with 3-length 1024 tokens range.")
print("AND REMEMBER ABOUT UPDATING num_elems etc AND et_a1-et_a3, line 204+ test_tokens_uniq")
local_assert(os.path.getsize(text_file_name) < 1*1024*1024, "file is larger than 1Mb, stop")
text_json = ""
with open(text_file_name) as f:
while True:
line = f.readline()
if not line:
break
text_json+= line
j = json.loads(text_json)
print("")
print("data loaded from file " + text_file_name)
print("calculating...")
local_assert(len(j["Results"])==j["Shaders"], "expected Results and Shaders in json, stop")
    resize_token_2_01 = []
    resize_token_2_01_1 = []
    resize_token_2_02 = []
    resize_token_2_02_1 = []
    resize_token_2_03 = []
    resize_token_2_03_1 = []
    resize_token_3_01 = []
    resize_token_3_02 = []
    resize_token_4_4 = []
    resize_token_5_1 = []
    resize_token_5_2 = []
    resize_token_1_01 = []
    resize_token_1_02 = []
    resize_token_1_03 = []
    resize_token_1_04 = []
    resize_token_1_05 = []
    resize_token_1_06 = []
    for s in j["Results"]:
        local_assert(len(s) == 6, "length of ID expected to be 6, stop")
        resize_token_1_01.append(s[0])
        resize_token_1_02.append(s[1])
        resize_token_1_03.append(s[2])
        resize_token_1_04.append(s[3])
        resize_token_1_05.append(s[4])
        resize_token_1_06.append(s[5])
        resize_token_2_01.append(s[0:2])
        resize_token_2_01_1.append(s[1:3])
        resize_token_2_02.append(s[2:4])
        resize_token_2_02_1.append(s[3:5])
        resize_token_2_03.append(s[4:6])
        resize_token_2_03_1.append(s[0] + s[5])
        resize_token_3_01.append(s[0:3])
        resize_token_3_02.append(s[3:6])
        #resize_token_4_4.append(s[0:4]) # bad, too much?
        resize_token_4_4.append(s[1:5]) # 2048 x 512
        resize_token_5_1.append(s[1:6])
        resize_token_5_2.append(s[0:5])
    uniq_1_01_all, uniq_1_01_uniq = get_unique_tokens(resize_token_1_01, 1)
    uniq_1_02_all, uniq_1_02_uniq = get_unique_tokens(resize_token_1_02, 1)
    uniq_1_03_all, uniq_1_03_uniq = get_unique_tokens(resize_token_1_03, 1)
    uniq_1_04_all, uniq_1_04_uniq = get_unique_tokens(resize_token_1_04, 1)
    uniq_1_05_all, uniq_1_05_uniq = get_unique_tokens(resize_token_1_05, 1)
    uniq_1_06_all, uniq_1_06_uniq = get_unique_tokens(resize_token_1_06, 1)
    uniq_2_01_all, uniq_2_01_uniq = get_unique_tokens(resize_token_2_01, 1)
    uniq_2_02_all, uniq_2_02_uniq = get_unique_tokens(resize_token_2_02, 1)
    uniq_2_03_all, uniq_2_03_uniq = get_unique_tokens(resize_token_2_03, 1)
    uniq_2_01_all_1, uniq_2_01_uniq_1 = get_unique_tokens(resize_token_2_01_1, 1)
    uniq_2_02_all_1, uniq_2_02_uniq_1 = get_unique_tokens(resize_token_2_02_1, 1)
    uniq_2_03_all_1, uniq_2_03_uniq_1 = get_unique_tokens(resize_token_2_03_1, 1)
    uniq_3_01_all, uniq_3_01_uniq = get_unique_tokens(resize_token_3_01, 0)
    uniq_3_02_all, uniq_3_02_uniq = get_unique_tokens(resize_token_3_02, 0)
    uniq_4_4_all, uniq_4_4_uniq = get_unique_tokens(resize_token_4_4, 1)
    uniq_5_1_all, uniq_5_1_uniq = get_unique_tokens(resize_token_5_1, 1)
    uniq_5_2_all, uniq_5_2_uniq = get_unique_tokens(resize_token_5_2, 1)
    test_tokens_uniq(resize_token_3_01, resize_token_3_02, 1, uniq_3_01_all, uniq_3_02_all)
    test_tokens_uniq(resize_token_3_01, resize_token_3_02, 2, uniq_3_01_all, uniq_3_02_all)
    test_tokens_uniq(resize_token_3_01, resize_token_3_02, 3, uniq_3_01_all, uniq_3_02_all)
    test_tokens_uniq(resize_token_3_01, resize_token_3_02, 10, uniq_3_01_all, uniq_3_02_all)
    find_my_tokens(uniq_3_01_all, uniq_3_02_all)
    #exit()
    print("")
    print("Total number of shaders on Shadertoy, returned by API - " + str(len(j["Results"])))
    print("")
    print("4 char token:")
    print("Unique tokens 4 char-length - " + str(len(uniq_4_4_all)))
    print("4 char-length token with Single connection - " + str(len(uniq_4_4_uniq)))
    print("")
    print("3 char token:")
    print("123___ all - " + str(len(uniq_3_01_all)) + " Single - " + str(len(uniq_3_01_uniq)))
    print("___456 all - " + str(len(uniq_3_02_all)) + " Single - " + str(len(uniq_3_02_uniq)))
    print("uniq single left:")
    print(uniq_3_01_uniq)
    print("")
    print("2 char token:")
    print("12____ all - " + str(len(uniq_2_01_all)) + " Single - " + str(len(uniq_2_01_uniq)))
    print("__34__ all - " + str(len(uniq_2_02_all)) + " Single - " + str(len(uniq_2_02_uniq)))
    print("____56 all - " + str(len(uniq_2_03_all)) + " Single - " + str(len(uniq_2_03_uniq)))
    print("")
    print("2 char token ALT:")
    print("_23___ all - " + str(len(uniq_2_01_all_1)) + " Single - " + str(len(uniq_2_01_uniq_1)))
    print("___45_ all - " + str(len(uniq_2_02_all_1)) + " Single - " + str(len(uniq_2_02_uniq_1)))
    print("1____6 all - " + str(len(uniq_2_03_all_1)) + " Single - " + str(len(uniq_2_03_uniq_1)))
    print("")
    print("1 char token:")
    print("token 1 all - " + str(len(uniq_1_01_all)) + " uniq - " + str(len(uniq_1_01_uniq)))
    print(uniq_1_01_all)
    print("token 2 all - " + str(len(uniq_1_02_all)) + " uniq - " + str(len(uniq_1_02_uniq)))
    print(uniq_1_02_all)
    print("token 3 all - " + str(len(uniq_1_03_all)) + " uniq - " + str(len(uniq_1_03_uniq)))
    print(uniq_1_03_all)
    print("token 4 all - " + str(len(uniq_1_04_all)) + " uniq - " + str(len(uniq_1_04_uniq)))
    print(uniq_1_04_all)
    print("token 5 all - " + str(len(uniq_1_05_all)) + " uniq - " + str(len(uniq_1_05_uniq)))
    print(uniq_1_05_all)
    print("token 6 all - " + str(len(uniq_1_06_all)) + " uniq - " + str(len(uniq_1_06_uniq)))
    print(uniq_1_06_all)
    print("")
    print("5 char token:")
    print("_23456 all - " + str(len(uniq_5_1_all)) + " Single - " + str(len(uniq_5_1_uniq)))
    print("12345_ all - " + str(len(uniq_5_2_all)) + " Single - " + str(len(uniq_5_2_uniq)))
    print("")
    print("Using 3-tokens - sort 3 by num of connections:")
    #### combine
    total_l = uniq_3_01_all.tolist()
    total_r = uniq_3_02_all.tolist()
    if len(uniq_3_01_uniq.tolist()) > 0:
        for a in uniq_3_01_uniq.tolist():
            total_l.append(a)
    if len(uniq_3_02_uniq.tolist()) > 0:
        for a in uniq_3_02_uniq.tolist():
            total_r.append(a)
    #### remove
    # uniq_3_01_all, uniq_3_01_uniq = get_unique_tokens(resize_token_3_01,1)
    # uniq_3_02_all, uniq_3_02_uniq = get_unique_tokens(resize_token_3_02,1)
    print("")
    print("3 tokens uniq")
    tk_l2 = []
    tk_l1 = []
    for a in total_l:
        tk_l2.append(a[0:2])
        tk_l1.append(a[2])
    uniq_21_3_01_all, uniq_2_3_01_uniq = get_unique_tokens(tk_l2, 1)
    uniq_11_3_01_all, uniq_1_3_01_uniq = get_unique_tokens(tk_l1, 1)
    print("")
    print("LEFT")
    print("uniq_2_3_01_all len " + str(len(uniq_21_3_01_all)))
    print("uniq_2_3_01_uniq len " + str(len(uniq_2_3_01_uniq)))
    print("uniq_1_3_01_all len " + str(len(uniq_11_3_01_all)))
    print("uniq_1_3_01_uniq len " + str(len(uniq_1_3_01_uniq)))
    # with open('utokens_21.txt', 'w') as f:
    #     for a in uniq_2_3_01_all:
    #         f.write(a)
    #         f.write('\n')
    # with open('utokens_11.txt', 'w') as f:
    #     for a in uniq_1_3_01_all:
    #         f.write(a)
    #         f.write('\n')
    tk_l2 = []
    tk_l1 = []
    for a in total_r:
        tk_l2.append(a[0:2])
        tk_l1.append(a[2])
    uniq_22_3_01_all, uniq_2_3_01_uniq = get_unique_tokens(tk_l2, 1)
    uniq_12_3_01_all, uniq_1_3_01_uniq = get_unique_tokens(tk_l1, 1)
    print("")
    print("RIGHT")
    print("uniq_2_3_01_all len " + str(len(uniq_22_3_01_all)))
    print("uniq_2_3_01_uniq len " + str(len(uniq_2_3_01_uniq)))
    print("uniq_1_3_01_all len " + str(len(uniq_12_3_01_all)))
    print("uniq_1_3_01_uniq len " + str(len(uniq_1_3_01_uniq)))
    tokens_to_bin_3w(total_l, total_r, resize_token_3_01, resize_token_3_02, j["Results"], uniq_21_3_01_all, uniq_11_3_01_all, uniq_22_3_01_all, uniq_12_3_01_all)
    return
def test_read_write():
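    """Small sanity check that bytes written to a file read back unchanged."""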
    newFileBytes = [123, 3, 255, 0, 100]
    with open("test.txt", "wb") as ifile:
        for byte in newFileBytes:
            ifile.write(byte.to_bytes(1, byteorder='big'))
    new_a = []
    with open("test.txt", "rb") as ifile:
        while True:
            data = ifile.read(1)
            if not data:
                break
            new_a.append(ord(data))
    print(newFileBytes)
    print(new_a)
def to_bits(arr_use,len_bits):
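    """Pack each value of arr_use into len_bits bits and return the bit stream
    as a list of bytes, zero-padded to a whole byte at the end.
    E.g. to_bits([1, 2, 3], 4) -> [0x12, 0x30]."""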
    bits_ctr = 0
    res_bts = []
    tbit = ''
    for a in range(len(arr_use)):
        bits = ('{:0' + str(len_bits) + 'b}').format(arr_use[a])
        for b in range(len_bits):
            tbit += bits[b]
            bits_ctr += 1
            if (bits_ctr % 8) == 0:
                res_bts.append(int(tbit, 2))
                tbit = ''
    if (bits_ctr % 8) != 0:
        tctr = (bits_ctr % 8)
        for a in range(8 - tctr):
            tbit += '0'
            bits_ctr += 1
        res_bts.append(int(tbit, 2))
        tbit = ''
    return res_bts
def array_to_text_uint_enc(arr, arr_name):
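    """Pack a byte list into big-endian 32-bit words and format them as a GLSL
    uint array. E.g. array_to_text_uint_enc([0xAA, 0xBB, 0xCC, 0xDD, 0xEE], "t")
    returns 'uint t[] = uint[](\\n0xAABBCCDDu, 0xEE000000u); // array len 2\\n'."""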
text_arr = "uint " + arr_name + "[] = uint[](\n"
tmp_uint = 0
counter = 0
counter_text = 0
array_sz_c = 0
for a in arr:
tmp_uint = (tmp_uint<<8) + a
counter = (counter+1)%4
if counter==0:
counter_text+= 1
text_arr+= ('0x{0:0{1}X}'.format(tmp_uint,8))+"u, " + ("\n" if (counter_text%6)==0 else "")
tmp_uint = 0
array_sz_c+=1
if counter!=0:
for a in range(4-counter):
tmp_uint = (tmp_uint<<8)
counter = 0
counter_text+= 1
text_arr+= ('0x{0:0{1}X}'.format(tmp_uint,8))+"u, " + ("\n" if (counter_text%6)==0 else "")
tmp_uint = 0
array_sz_c+=1
if ((counter_text)%6)==0 and counter_text > 0:
text_arr = text_arr[:-3]
else:
text_arr = text_arr[:-2]
text_arr+= ");"
text_arr+=" // array len "+ str(array_sz_c)
text_arr+="\n"
return text_arr
def py_array_to_text_uint_enc(arr, arr_name):
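    """Same 32-bit packing as array_to_text_uint_enc(), but emitted as Python
    source: 'def get_<arr_name>(): return [...]'."""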
text_arr = "def get_" + arr_name + "():\n"
text_arr += " return ["
tmp_uint = 0
counter = 0
counter_text = 0
array_sz_c = 0
for a in arr:
tmp_uint = (tmp_uint<<8) + a
counter = (counter+1)%4
if counter==0:
counter_text+= 1
text_arr+= ('0x{0:0{1}X}'.format(tmp_uint,8))+", " + ("\n " if (counter_text%6)==0 else "")
tmp_uint = 0
array_sz_c+=1
if counter!=0:
for a in range(4-counter):
tmp_uint = (tmp_uint<<8)
counter = 0
counter_text+= 1
text_arr+= ('0x{0:0{1}X}'.format(tmp_uint,8))+", " + ("\n " if (counter_text%6)==0 else "")
tmp_uint = 0
array_sz_c+=1
if ((counter_text)%6)==0 and counter_text > 0:
text_arr = text_arr[:-3]
else:
text_arr = text_arr[:-2]
text_arr+= " ]\n\n"
return text_arr
def tokens_to_bin_3w(ta, tb, da, db, old_db, uta2,uta1,utb2,utb1):
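    """Bit-pack the ID database (parameter roles as inferred from the calls above):
    ta/tb are the unique left/right 3-char half-tokens, da/db the per-shader
    left/right halves, old_db the original 6-char IDs, and uta2/uta1/utb2/utb1
    the 2+1 char sub-token tables. Each ID becomes a (left index, right index)
    pair; right indices are grouped into pages of 2^bits_rnge entries,
    bit-packed, round-trip verified against old_db, then written out as .bin
    files plus GLSL uint arrays (shadertoy_out.txt) and Python arrays
    (shadertoy_out_python.txt)."""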
print("write tokens_to_bin_3w ...")
nta = np.array(ta)
ntb = np.array(tb)
index_a = []
index_b = []
print("tokens to idx 1...")
pas_a = 0
pas_b = 0
for a in da:
tka = np.where(nta==a)[0]
if len(tka)>0:
index_a.append(tka[0])
else:
pas_a+=1
#index_a.append(0)
for a in db:
tka = np.where(ntb==a)[0]
if len(tka)>0:
index_b.append(tka[0])
else:
pas_b+=1
#index_b.append(0)
print("pas_a "+str(pas_a))
print("pas_b "+str(pas_b))
bits_left = bit_length(len(ta))
bits_right = bit_length(len(tb))
print("")
print("bits l r "+str(bits_left)+" "+str(bits_right))
print("")
idx_x_0=np.array(index_a)
idx_x_1=np.array(index_b)
print("find conn again...")
res_a = []
for a in range(len(ta)):
tk0 = np.where(idx_x_0==a)[0]
tar = []
for b in tk0.tolist():
if b<len(idx_x_1):
tar.append(idx_x_1[b])
res_a.append(tar)
tmp_bts_len_22 = 0
for a in res_a:
for b in a:
tmp_bts_len_22+=bit_length(b)
print("")
print("tmp_bts_len_22 "+str(tmp_bts_len_22))
print("")
print("")
print("res_a len " + str(len(res_a)))
print("")
    #divide = 256
    divide = 8
    #divide = 4
    dvdr = int(len(tb) / divide)
    bits_rnge = bit_length(dvdr - 1)
    print("")
    print("dvdr " + str(dvdr))
    print("bits_rnge " + str(bits_rnge))
    print("(1<<bits_rnge) " + str((1 << bits_rnge)))
    print("")
    tidx_new = []
    for x in range(int(len(tb) / dvdr)):
        tidx_new.append([])
    for x in range(int(len(tb) / dvdr)):
        for c in res_a:
            tidx = []
            for d in c:
                if d >= (1 << bits_rnge) * x and d < (1 << bits_rnge) + (1 << bits_rnge) * x:
                    tidx.append(d % (1 << bits_rnge))
            tidx_new[x].append(tidx)
    max_in_tidx_new = 0
    for a in tidx_new:
        for b in a:
            for c in b:
                if c > max_in_tidx_new:
                    max_in_tidx_new = c
    num_bts_max_in_tidx_new = 0
    for a in tidx_new:
        for b in a:
            for c in b:
                if bit_length(c) >= bit_length(max_in_tidx_new):
                    num_bts_max_in_tidx_new += 1
    print("")
    print("max_in_tidx_new " + str(max_in_tidx_new))
    print("num_bts_max_in_tidx_new " + str(num_bts_max_in_tidx_new))
    print("")
    print("res: ")
    print("")
    print("tidx_new len " + str(len(tidx_new)))
    print(tidx_new[len(tidx_new) - 1][len(tidx_new[len(tidx_new) - 1]) - 1])
    tbits_ctr = 0
    for a in tidx_new:
        for b in a:
            for c in b:
                tbits_ctr += bits_rnge
    print("tbits_ctr")
    print(tbits_ctr)
    #return
    print("")
    print("test_decomp")
    print("")
    ####### TEST RESTORE ARR
    new_map = []
    old_ta = ta
    old_tb = tb
    old_bits_l = bits_rnge
    bits_num = (1 << bits_rnge)
    new_tidx = tidx_new
    print("old_ta old_tb len " + str(len(old_ta)) + " " + str(len(old_tb)))
    for a in range(len(new_tidx)):
        tar_a = new_tidx[a]  # [group[tokens[token_con[]]]]
        for b in range(len(tar_a)):
            tar_b = tar_a[b]
            for c in tar_b:
                new_map.append(ta[b] + tb[c + a * bits_num])
    print("new_map len " + str(len(new_map)))
    #print(new_map[1000])
    print("TEST")
    new_na = np.array(new_map)
    old_na = np.array(old_db)
    for a in new_map:
        if len(np.where(old_na == a)[0]) != 1:
            print("not found new_map " + a)
            print("np.where(new_na==a)[0] " + str(np.where(old_na == a)[0]))
            exit(1)
    for a in old_db:
        if len(np.where(new_na == a)[0]) != 1:
            print("not found old_db " + a)
            print("np.where(new_na==a)[0] " + str(np.where(new_na == a)[0]))
            exit(1)
    print("all true")
    print("continue")
    os.makedirs('data', exist_ok=True)  # make sure the output directory exists
    with open('data/tokens_left.txt', 'w') as f:
        for line in ta:
            f.write(line)
            f.write('\n')
    with open('data/tokens_right.txt', 'w') as f:
        for line in tb:
            f.write(line)
            f.write('\n')
    with open('data/original_db.txt', 'w') as f:
        for line in old_db:
            f.write(line)
            f.write('\n')
    with open('data/sorted_db.txt', 'w') as f:
        for line in new_map:
            f.write(line)
            f.write('\n')
    with open('data/sorted_db_single_line.txt', 'w') as f:
        for line in new_map:
            f.write(line)
    #return
    # TOP IS CORRECT
    # SAVE ARRAY SIZE
    size_full_size = len(tidx_new)
    size_page_elem_size = []
    size_num_pages = []
    for a in tidx_new:
        size_num_pages.append(len(a))
    for a in range(len(tidx_new)):
        tszx = []
        for b in tidx_new[a]:
            tszx.append(len(b))
        size_page_elem_size.append(tszx)
    print("")
    print("size_full_size ")
    print(size_full_size)
    #print("size_page_elem_size ")
    #print(size_page_elem_size)
    print("size_page_elem_size len " + str(len(size_page_elem_size)))
    print("size_page_elem_size[0] len " + str(len(size_page_elem_size[0])))
    print("size_page_elem_size[last] len " + str(len(size_page_elem_size[len(size_page_elem_size) - 1])))
    #print("size_page_elem_size " + str(size_page_elem_size[15]))
    print("size_num_pages ")
    print(size_num_pages)
    print("max in size_page_elem_size...")
    ts_bitx_max = 0
    for a in size_page_elem_size:
        for b in a:
            if b > ts_bitx_max:
                ts_bitx_max = b
    num_bts_sz = 0
    for a in size_page_elem_size:
        for b in a:
            if bit_length(b) >= bit_length(ts_bitx_max):
                num_bts_sz += 1
    print("max size_page_elem_size " + str(ts_bitx_max) + " bits " + str(bit_length(ts_bitx_max)))
    old_sz_m = bit_length(ts_bitx_max) * len(size_page_elem_size[0]) * len(size_page_elem_size)
    print("total bits " + str(old_sz_m))
    print("num_bts_sz " + str(num_bts_sz))
    size_page_elem_size_bits = bit_length(ts_bitx_max)
print("")
print("Removing numb with bit length from map "+str(bit_length(ts_bitx_max)))
tmps_size_page_elem_size = []
TMP_size_page_elem_size = []
for a in size_page_elem_size:
TMP_size_page_elem_size.append(a.copy())
TMP_size_page_elem_size[0][0]=0
for a in range(len(TMP_size_page_elem_size)):
for b in range(len(TMP_size_page_elem_size[a])):
if bit_length(TMP_size_page_elem_size[a][b])>=bit_length(ts_bitx_max):
tmps_size_page_elem_size.append([TMP_size_page_elem_size[a][b],a,b])
TMP_size_page_elem_size[a][b]=0
#print(tmps_size_page_elem_size)
print("")
print("Recalculating map bits length..")
ts_bitx_max = 0
for a in TMP_size_page_elem_size:
for b in a:
if b>ts_bitx_max:
ts_bitx_max=b
ts_bitx_max_a = 0
ts_bitx_max_b = 0
ts_bitx_max_c = 0
for a in tmps_size_page_elem_size:
if a[0]>ts_bitx_max_a:
ts_bitx_max_a=a[0]
if a[1]>ts_bitx_max_b:
ts_bitx_max_b=a[1]
if a[2]>ts_bitx_max_c:
ts_bitx_max_c=a[2]
print("NEW max TMP_size_page_elem_size "+str(ts_bitx_max)+" bits "+str(bit_length(ts_bitx_max)))
new_sz_m=bit_length(ts_bitx_max)*len(TMP_size_page_elem_size[0])*len(TMP_size_page_elem_size)
print("NEW total bits "+str(new_sz_m))
tsize_t=bit_length(ts_bitx_max_a)*len(tmps_size_page_elem_size)+bit_length(ts_bitx_max_b)*len(tmps_size_page_elem_size)+bit_length(ts_bitx_max_c)*len(tmps_size_page_elem_size)
print("NEW tmps_size_page_elem_size bits size "+str(tsize_t))
print("NEW save bits size "+str(old_sz_m-(new_sz_m+tsize_t)))
    #return
    print("")
    print("save to shadertoy format...")
    t_map_uint = []
    py_t_map_uint = []
    idx_c = 0
    #test_a = to_bits([0,1,2,3,0,1,2,3,1,3,2,1,2,3,4,1,2,3,4,5,1,2,127],bits_rnge)
    #t_map_uint.append(array_to_text_uint_enc(test_a,"map_uint_"+str(idx_c)))
    print("save to file...")
    tmp_bin_a = []
    for a in tidx_new:
        tar_nw = []
        for b in a:
            for c in b:
                tar_nw.append(c)
        tmp_bin_a.append(to_bits(tar_nw, bits_rnge))
        t_map_uint.append(array_to_text_uint_enc(to_bits(tar_nw, bits_rnge), "map_uint_" + str(idx_c)))
        py_t_map_uint.append(py_array_to_text_uint_enc(to_bits(tar_nw, bits_rnge), "map_uint_" + str(idx_c)))
        idx_c += 1
    os.makedirs("compressed", exist_ok=True)  # make sure the output directory exists
    with open("compressed/connections_idx.bin", "wb") as ifile:
        for b in tmp_bin_a:
            for byte in b:
                ifile.write(byte.to_bytes(1, byteorder='big'))
    idx_c = 0
    t_map_map_uint = []
    py_t_map_map_uint = []
    tmp_bin_a = []
    for a in size_page_elem_size:
        tmp_bin_a.append(to_bits(a, size_page_elem_size_bits))
        t_map_map_uint.append(array_to_text_uint_enc(to_bits(a, size_page_elem_size_bits), "map_map_uint_" + str(idx_c)))
        py_t_map_map_uint.append(py_array_to_text_uint_enc(to_bits(a, size_page_elem_size_bits), "map_map_uint_" + str(idx_c)))
        idx_c += 1
    with open("compressed/connections_idx_map.bin", "wb") as ifile:
        for b in tmp_bin_a:
            for byte in b:
                ifile.write(byte.to_bytes(1, byteorder='big'))
    mapx = []
    for a in uta2:
        mapx.append(ord(a[0]))
        mapx.append(ord(a[1]))
    tuta2 = array_to_text_uint_enc(mapx, "map_ta2")
    py_tuta2 = py_array_to_text_uint_enc(mapx, "map_ta2")
    mapx = []
    for a in uta1:
        mapx.append(ord(a))
    tuta1 = array_to_text_uint_enc(mapx, "map_ta1")
    py_tuta1 = py_array_to_text_uint_enc(mapx, "map_ta1")
    mapx = []
    for a in utb2:
        mapx.append(ord(a[0]))
        mapx.append(ord(a[1]))
    tutb2 = array_to_text_uint_enc(mapx, "map_tb2")
    py_tutb2 = py_array_to_text_uint_enc(mapx, "map_tb2")
    mapx = []
    for a in utb1:
        mapx.append(ord(a))
    tutb1 = array_to_text_uint_enc(mapx, "map_tb1")
    py_tutb1 = py_array_to_text_uint_enc(mapx, "map_tb1")
    with open('shadertoy_out.txt', 'w') as f:
        for a in t_map_uint:
            f.write(a)
            f.write('\n')
        for a in t_map_map_uint:
            f.write(a)
            f.write('\n')
        f.write(tuta2)
        f.write(tuta1)
        f.write(tutb2)
        f.write(tutb1)
        f.write('\n')
    with open('shadertoy_out_python.txt', 'w') as f:
        for a in py_t_map_uint:
            f.write(a)
            f.write('\n')
        for a in py_t_map_map_uint:
            f.write(a)
            f.write('\n')
        f.write(py_tuta2)
        f.write(py_tuta1)
        f.write(py_tutb2)
        f.write(py_tutb1)
        f.write('\n')
    return
def bit_length(n): # return the bit size of a non-negative integer
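    # equivalent to Python's built-in n.bit_length() for non-negative n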
    bits = 0
    while n >> bits:
        bits += 1
    return bits
def main():
    #test_read_write()
    download_ids()
    process_json()

if __name__ == '__main__':
    main()