
Commit abc9b53

committed Jul 7, 2023
updates on code
1 parent 2256a59 commit abc9b53

20 files changed, +1501 −164 lines
 

.ipynb_checkpoints/michelle-checkpoint.ipynb (+698 −10)
Large diffs are not rendered by default.

Figures/1dhist.png (64 KB)
Figures/2dhist.png (32.5 KB)
[additional figure images changed — 51.5 KB, 55.6 KB, 54.1 KB, 56.4 KB, and others; file names not rendered in this view]

__pycache__/cover.cpython-310.pyc (0 Bytes) — binary file not shown.
[two further binary files changed (2.24 KB, 1.36 KB); file names not rendered in this view]

cover.py (+2 −2)

@@ -46,11 +46,11 @@ def generateGridLines(self, n=100):
 
     def generateEvenGrid(self, n=100):
 
-        ycoor = 25
+        ycoor = 25 #set to radius of outermost layer when list of radii is avaliable
         xcoor = np.linspace(-self.env.top_layer_lim, self.env.top_layer_lim, n)
 
        slopes = ycoor/(xcoor-self.start)
-        return [Line(self.env, self.start, slope) for slope in slopes]
+        return [Line(self.env, self.start, slope) for slope in slopes]
 
    def generateRandomLines(self, n=100):
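
Note: generateEvenGrid fans n test lines out from the vertex (self.start, 0) to n evenly spaced x positions on the outermost layer, hard-coded here at y = 25 cm (the new comment flags that this should eventually come from the list of layer radii). A minimal sketch of that slope calculation, with plain numbers standing in for the Environment and Line objects (the values below are illustrative, not from the repo):

import numpy as np

# hypothetical stand-ins for self.start and self.env.top_layer_lim
start, top_layer_lim, n = 2.0, 100.0, 5
ycoor = 25.0  # outermost layer radius in cm

xcoor = np.linspace(-top_layer_lim, top_layer_lim, n)  # evenly spaced endpoints on the top layer
slopes = ycoor / (xcoor - start)                       # slope of each line through (start, 0)
print(slopes)                                          # one slope per endpoint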

messingaround.py (+16 −3)

@@ -26,7 +26,7 @@
 cover = Cover(env, data)
 cover.solve(z0 = 0, lining='solveS', n = 16, show = True)
 cover.plot()
-'''
+
 
 env = Environment()
 data = DataSet(env, n_points=150, equal_spacing = True)
@@ -38,7 +38,7 @@
 cover.plot()
 #cover.plot()
 #data.plot()
-
+'''
 
 '''
 
@@ -147,6 +147,8 @@
 #wedge_test(lining = 'solveS_reverse', solve_at = 0, n =16, z0 = np.arange(-15, 15.5, 0.5), wedges = [0,30], savefig = False, v = 'v3')
 #wedge_test(lining = 'solveQ', solve_at = [-10,0,10], n =16, z0 = np.arange(-15, 15.5, 0.5), wedges = [0,128], savefig = True, v = 'v3')
 
+
+'''
 def odd_loop(lining = 'solveS', v = 'v2'):
     accep = 0.0
     zvalues = 3
@@ -165,4 +167,15 @@ def even_loop(lining = 'solveS', v = 'v2'):
 
 
 #odd_loop('solveS_reverse', v = 'v3')
-#even_loop('solveS_reverse', v = 'v3')
+#even_loop('solveS_reverse', v = 'v3')
+
+
+
+env = Environment()
+events = readFile('wedgeData_v2_128.txt', 10)
+wedge1 = convertToDataset(events[5])
+cover = wedgeCover(env, wedge1)
+cover.solve(lining = 'solveS_center2', z0 = 15)
+cover.plot()
+'''
+z099(lining = "solveS_reverse",wedges = [0,128], savefig=True, v = 'v2')

michelle.ipynb (+343 −2)
Large diffs are not rendered by default.

test_modules.py (+231 −126)
@@ -65,9 +65,9 @@ def wedge_test(lining:str = "solveS", solve_at = 0, z0 = np.arange(-15, 15.5, 0.
 
            for i in range(len(test_lines)):
                for patch in cover.patches:
-                    if patch.contains(test_lines[i]):
+                    if patch.contains(test_lines[i]):
                        percentage_accepted += 1
-                        break
+                        break
 
 
            percentage_accepted = percentage_accepted/lines
@@ -104,6 +104,235 @@ def wedge_test(lining:str = "solveS", solve_at = 0, z0 = np.arange(-15, 15.5, 0.
        plt.savefig(f"Figures/wedge_test({lining}_{v.replace('.','')}_{at}_n{n})")
    plt.show()
 
+def z099(lining:str = "solveS", accept = 0.999, start = 'odd', n = 16, wedges = [0, 128], v = 'v3', savefig = False):
+    solve_at = [0]
+    real_solve = [0]
+    reached = False
+    z0 = np.arange(-15, 15.5, 0.5)
+    lines = 1000
+    left_index = 0
+    right_index = 60
+    last_left = 30
+    last_right = 31
+    rf = False
+    lf = False
+
+    while reached == False:
+        mean_list = np.zeros((len(z0), wedges[1]-wedges[0]))
+        num_covers = []
+        PRF = []
+        file = f'wedgeData_{v}_128.txt'
+
+        with open(file) as f:
+            for ik, k in enumerate(np.arange(wedges[0], wedges[1])):
+                d = np.array(ast.literal_eval(f.readline()))
+                env = Environment()
+                data = DataSet(env = env)
+                data.input_data(d, add = True)
+                cover = Cover(env, data)
+                cover.solve(z0 = solve_at, lining=lining, n = n, show = False)
+                num_covers.append(cover.n_patches)
+                out = []
+
+                for layer in range(env.layers):
+                    for point in data.array[layer]:
+
+                        num_in = 0
+                        for patch in cover.patches:
+                            if patch.contains_p(point, layer):
+                                num_in += 1
+
+                        out.append(num_in)
+                PRF.append(out)
+
+                for iz, z in enumerate(np.array(z0)):
+                    percentage_accepted = 0
+
+                    lg = LineGenerator(env, z)
+                    test_lines = lg.generateEvenGrid(lines)
+
+                    for i in range(len(test_lines)):
+                        for patch in cover.patches:
+                            if patch.contains(test_lines[i]):
+                                percentage_accepted += 1
+                                break
+
+
+                    percentage_accepted = percentage_accepted/lines
+                    mean_list[iz, ik] = mean_list[iz, ik] + percentage_accepted
+        z0_means = np.mean(mean_list, axis = 1)
+        if np.all(z0_means > accept):
+            break
+        if np.all(z0_means[left_index:last_left] > accept):
+            real_solve = np.append(real_solve, [z0[left_index]])
+            print('new left', z0[left_index])
+            last_left = left_index
+            left_index = -1
+        if np.all(z0_means[last_right:right_index] > accept):
+            real_solve = np.append(real_solve, [z0[right_index]])
+            print('new right', z0[right_index])
+            last_right = right_index
+            right_index = 61
+        left_index +=1
+        right_index -=1
+        real_solve = np.unique(real_solve)
+        solve_at = np.copy(real_solve)
+
+        solve_at = np.append(solve_at, z0[left_index])
+        solve_at = np.append(solve_at,z0[right_index])
+        solve_at = np.unique(solve_at)
+        print(left_index)
+        print(right_index)
+        print(np.sort(real_solve))
+
+    ymin = accept
+    mean_num = format(np.mean(num_covers), ".1f")
+    std_num = format(np.std(num_covers), ".1f")
+
+    plt.scatter(z0, np.mean(mean_list, axis = 1), color = 'r', s = 10)
+    plt.plot(z0, np.mean(mean_list, axis = 1), color = 'k')
+    plt.xlabel('z0 offset [cm]', fontsize = 16)
+    plt.ylabel('Acceptance', fontsize = 16)
+    plt.ylim(ymin, 1.0)
+    plt.title(f'{lining}', fontsize = 16)
+    PRFm = format(np.mean(out), '.2f')
+    PRFs = format(np.std(out), '.2f')
+    plt.legend([f"Number of Patches: {mean_num}" + r'$\pm$' + f"{std_num}\nPoint Repetition Factor: {PRFm}" + r'$\pm$' + f"{PRFs}\nPatches with " + r'$z_0$' + f" = {np.sort(np.round(np.array(solve_at), 2), axis = 0)}\nppl = {n}, " + r'$N_{wedges}$ ' + f"= {wedges[1]}, {v} events"],
+        loc = 8, fontsize = 12)
+    if savefig == True:
+        try:
+            at = len(solve_at)
+        except:
+            at = solve_at
+        plt.savefig(f"Figures/minimal_z0_odd_({lining}_{v.replace('.','')}_n{n})")
+    plt.show()
+
+
+def wedge_test_old(lining:str = "solveS", solve_at = 0, z0 = np.arange(-15, 15.5, 0.5), n = 16, wedges = [0, 128], lines=1000, savefig=False, v = 'v2'):
+    mean_list = np.zeros((len(z0), wedges[1]-wedges[0]))
+    num_covers = []
+    PRF = []
+    file = f'wedgeData_{v}_128.txt'
+
+    with open(file) as f:
+        for ik, k in enumerate(np.arange(wedges[0], wedges[1])):
+            d = np.array(ast.literal_eval(f.readline()))
+            env = Environment()
+            data = DataSet(env = env, n_points = 150)
+            data.input_data(d, add = True)
+            cover = Cover(env, data)
+            cover.solve(z0 = solve_at, lining=lining, n = n, show = False)
+            #data.plot(True)
+            num_covers.append(cover.n_patches)
+            out = []
+
+            for layer in range(env.layers):
+                for point in data.array[layer]:
+
+                    num_in = 0
+                    for patch in cover.patches:
+                        if patch.contains_p(point, layer):
+                            num_in += 1
+
+                    out.append(num_in)
+            PRF.append(out)
+
+            for iz, z in enumerate(np.array(z0)):
+                percentage_accepted = 0
+
+                lg = LineGenerator(env, z)
+                test_lines = lg.generateEvenGrid(lines)
+
+                for i in range(len(test_lines)):
+                    for patch in cover.patches:
+                        if patch.contains(test_lines[i]):
+                            percentage_accepted += 1
+                            break
+
+
+                percentage_accepted = percentage_accepted/lines
+                mean_list[iz, ik] = mean_list[iz, ik] + percentage_accepted
+                #print(ik)
+    mean_accept = format(np.mean(mean_list), ".3f")
+    mean_num = format(np.mean(num_covers), ".1f")
+    std_num = format(np.std(num_covers), ".1f")
+    if type(solve_at) == float:
+        ymin = 0
+    elif type(solve_at) == int:
+        ymin = 0
+    else:
+        ymin = 0.90
+
+    plt.scatter(z0, np.mean(mean_list, axis = 1), color = 'r', s = 10)
+    plt.plot(z0, np.mean(mean_list, axis = 1), color = 'k')
+    plt.xlabel('z0 offset [cm]', fontsize = 16)
+    plt.ylabel('Acceptance', fontsize = 16)
+    plt.ylim(ymin, 1.0)
+    plt.title(f'{lining}', fontsize = 16)
+    PRFm = format(np.mean(out), '.2f')
+    PRFs = format(np.std(out), '.2f')
+    plt.legend([f"Number of Patches: {mean_num}" + r'$\pm$' + f"{std_num}\nPoint Repetition Factor: {PRFm}" + r'$\pm$' + f"{PRFs}\nPatches with " + r'$z_0$' + f" = {np.round(np.array(solve_at), 2)}\nppl = {n}, " + r'$N_{wedges}$ ' + f"= {wedges[1]}, {v} events"],
+        loc = 8, fontsize = 12)
+    if savefig == True:
+        try:
+            at = len(solve_at)
+        except:
+            at = solve_at
+        plt.savefig(f"Figures/wedge_test({lining}_{v.replace('.','')}_{at}_n{n})")
+    if np.mean(np.mean(mean_list, axis = 1)) > 0.999:
+        plt.show()
+        return np.mean(np.mean(mean_list, axis = 1))
+    else:
+        plt.clf()
+        return np.mean(np.mean(mean_list, axis = 1))
+
+def wedgeSlopePlot(lining:str = "solveS", events=128, lines=1000, z0 = 0, savefig=False, show = True, v = 'v2'):
+
+    percentage_accepted = [0 for _ in range(lines)]
+
+
+    file = open(f'wedgeData_{v}_128.txt')
+    ''' for i in range(7):
+        line = file.readline()'''
+    for k in range(events):
+        line = file.readline()
+        env = Environment()
+        data = DataSet(env, n_points=150)
+        d = np.array(ast.literal_eval(line))
+        data.input_data(d, add = True)
+        cover = Cover(env, data)
+        cover.solve(lining=lining, z0 = z0, show = False)
+
+        lg = LineGenerator(env, z0)
+        test_lines = lg.generateEvenGrid(lines)
+        co_tan = []
+
+
+        for i in range(len(test_lines)):
+            co_tan.append(100/test_lines[i].slope)
+            for patch in cover.patches:
+                if patch.contains(test_lines[i]):
+                    percentage_accepted[i] += 1
+                    break
+
+
+    percentage_accepted = [x / events for x in percentage_accepted]
+    mean_accept = format(np.mean(percentage_accepted), ".3f")
+
+    if show == False:
+        return np.mean(percentage_accepted)
+
+    print(f"({lining}) - {mean_accept}")
+    plt.plot(co_tan, percentage_accepted, c="b", label = "Mean acceptance: "+mean_accept)
+
+    plt.title(f"Acceptance Rate ({lining})", fontsize = '20')
+    plt.xlabel("dZ/dr", fontsize = '16')
+    plt.ylabel("Acceptance Probability", fontsize = '16')
+    plt.legend(fontsize = '16')
+    if savefig == True:
+        plt.savefig(f"Figures/Acceptance_Rate_({lining})")
+    plt.show()
+
 def numCovers(clustering:str = "", lining:str = "solveS", events=1000, savefig=False, ideal=False):
    # Runs a bunch of iterations by generating 1000 datasets and
    # computing the cover. Then, it just looks at how many covers is
@@ -341,127 +570,3 @@ def duplicates(lining:str = "solveS", z0 = 0, events=1000, ideal=False):
 
    print(f'{lining} - mean: {np.mean(dupes)} std: {np.std(dupes)}')
 
-def wedge_test_old(lining:str = "solveS", solve_at = 0, z0 = np.arange(-15, 15.5, 0.5), n = 16, wedges = [0, 128], lines=1000, savefig=False, v = 'v2'):
-    ... (removed: body identical to the wedge_test_old definition added above; the function was moved earlier in the file)
-
-def wedgeSlopePlot(lining:str = "solveS", events=128, lines=1000, z0 = 0, savefig=False, show = True, v = 'v2'):
-    ... (removed: body identical to the wedgeSlopePlot definition added above; the function was moved earlier in the file)
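
The new z099 routine looks for a near-minimal set of z0 solve points: starting from the outer ends of the z0 = [-15, 15] cm scan it walks left_index and right_index inward, re-solves the cover on each pass, and appends a boundary value to real_solve once every offset outside it meets the accept threshold. The acceptance test it reuses is the same pattern found throughout this file: a test line counts as accepted if at least one patch contains it. A self-contained sketch of that counting pattern, where patches and test_lines are stand-ins for cover.patches and LineGenerator(...).generateEvenGrid(lines), not the repo's actual classes:

# hedged sketch of the acceptance-fraction loop used by wedge_test and z099
def acceptance_fraction(patches, test_lines):
    accepted = 0
    for line in test_lines:
        for patch in patches:
            if patch.contains(line):   # the first containing patch is enough
                accepted += 1
                break                  # stop checking the remaining patches
    return accepted / len(test_lines)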

triplets.py (new file, +194 lines)

import numpy as np
import matplotlib.pyplot as plt
from data import *
from cover import *
from test_modules import *
import math
import cv2
import os
import glob
import matplotlib
import ast
import time
from reader import *
from converter import *
from wedgecover import *
import matplotlib as mpl

def create_triplets(version = 'v2', random = False, anchor_layer = 1):
    if (anchor_layer < 1) or (anchor_layer > 3):
        raise('Anchor layer must not be the top or bottom layer')
    env = Environment()
    events = readFile(f'wedgeData_{version}_128.txt', 128)
    tripletsz = []
    tripletsphi = []
    hitres = 0.005 #50 microns
    for i in range(len(events)):
        wedge1 = convertToDataset(events[i])
        cover = wedgeCover(env, wedge1)
        cover.solveS_center2()
        for patch1 in cover.patches:

            if random == True:
                layer1 = np.random.rand(16) * hitres
                layer2 = np.random.rand(16) * hitres
                layer3 = np.random.rand(16) * hitres
                for point2 in layer2:
                    for point1 in layer1:
                        for point3 in layer3:
                            tripletsz.append([point1, point2, point3])
                            tripletsphi.append([point1/15, point2/20, point3/25])

            else:
                layer1 = patch1.superpoints[anchor_layer - 1]
                layer2 = patch1.superpoints[anchor_layer]
                layer3 = patch1.superpoints[anchor_layer + 1]

                for point2 in layer2.points:
                    for point1 in layer1.points:
                        for point3 in layer3.points:
                            tripletsz.append([point1.z, point2.z, point3.z])
                            phi1 = point1.phi
                            phi2 = point2.phi
                            phi3 = point3.phi
                            if ((point1.phi - point2.phi) > np.pi):
                                phi1 = point1.phi-(2*np.pi)
                            if ((point2.phi - point1.phi) > np.pi):
                                phi2 = point2.phi-(2*np.pi)
                            if ((point1.phi - point3.phi) > np.pi):
                                phi1 = point1.phi-(2*np.pi)
                            if ((point3.phi - point1.phi) > np.pi):
                                phi3 = point3.phi-(2*np.pi)
                            if ((point2.phi - point3.phi) > np.pi):
                                phi2 = point2.phi-(2*np.pi)
                            if ((point3.phi - point2.phi) > np.pi):
                                phi3 = point3.phi-(2*np.pi)
                            tripletsphi.append([phi1, phi2, phi3])
    return np.array([np.array(tripletsz), np.array(tripletsphi)])

def second_deriv(triplets):
    term1 = triplets[:,:,2]/(10-15)/(5-15)
    term2 = triplets[:,:,1]/(15-10)/(5-10)
    term3 = triplets[:,:,0]/(15-5)/(10-5)
    return 2*(term1+term2+term3)

def ratio(data, z_cutoff = -3, phi_cutoff = -4):
    z = np.log10(np.abs(second_deriv(data)))[0]
    phi = np.log10(np.abs(second_deriv(data)))[1]
    z_ratio = 0
    phi_ratio = 0
    both_ratio = 0
    for i in range(len(z)):
        if (z[i] <= z_cutoff):
            z_ratio += 1
        if(phi[i] <= phi_cutoff):
            phi_ratio += 1
        if (z[i] <= z_cutoff)&(phi[i] <= phi_cutoff):
            both_ratio += 1
    z_ratio = z_ratio/len(z)
    phi_ratio = phi_ratio/len(z)
    both_ratio = both_ratio/len(z)
    return [z_ratio, phi_ratio, both_ratio]

triplets_rand = create_triplets(random = True, anchor_layer = 3)
triplets_v2 = create_triplets('v2', anchor_layer = 3)
triplets_v3 = create_triplets('v3', anchor_layer = 3)

log_second_deriv_rand = np.log10(np.abs(second_deriv(triplets_rand)))
log_second_deriv_v2 = np.log10(np.abs(second_deriv(triplets_v2)))
log_second_deriv_v3 = np.log10(np.abs(second_deriv(triplets_v3)))

rand_ratios = ratio(triplets_rand)
v2_ratios = ratio(triplets_v2)
v3_ratios = ratio(triplets_v3)

bins = [np.arange(-11, 1, .3), np.arange(-12, -1, 0.3)]
plt.figure(figsize = (15, 10))

plt.subplot(2, 3, 1)
plt.hist(log_second_deriv_rand[0], edgecolor='black', rwidth=0.8, bins = bins[0], label = f'Ratio: {np.round(rand_ratios[0], 5)}')
plt.title('Random z')
plt.ylabel('Count')
plt.xlabel(r'$log_{10}$(|z"|)')
plt.legend(loc = 'upper left')
plt.yscale('log')
plt.axvline(-3, ls = '--', color = 'r')

plt.subplot(2, 3, 2)
plt.hist(log_second_deriv_v2[0], edgecolor='black', rwidth=0.8, bins =bins[0], label = f'Ratio: {np.round(v2_ratios[0], 5)}')
plt.title('v2 z')
plt.ylabel('Count')
plt.xlabel(r'$log_{10}$(|z"|)')
plt.legend(loc = 'upper left')
plt.yscale('log')
plt.axvline(-3, ls = '--', color = 'r')

plt.subplot(2, 3, 3)
plt.hist(log_second_deriv_v3[0], edgecolor='black', rwidth=0.8, bins = bins[0], label = f'Ratio: {np.round(v3_ratios[0], 5)}')
plt.title('v3 z')
plt.ylabel('Count')
plt.xlabel(r'$log_{10}$( |z"| )')
plt.legend(loc = 'upper left')
plt.yscale('log')
plt.axvline(-3, ls = '--', color = 'r')

plt.subplot(2, 3, 4)
plt.hist(log_second_deriv_rand[1], edgecolor='black', rwidth=0.8, bins = bins[1], label = f'Ratio: {np.round(rand_ratios[1], 5)}')
plt.title('Random phi')
plt.ylabel('Count')
plt.xlabel(r'$log_{10}$(|$\phi$"|)')
plt.legend(loc = 'upper left')
plt.yscale('log')
plt.axvline(-4, ls = '--', color = 'r')

plt.subplot(2, 3, 5)
plt.hist(log_second_deriv_v2[1], edgecolor='black', rwidth=0.8, bins = bins[1], label = f'Ratio: {np.round(v2_ratios[1], 5)}')
plt.title('v2 phi')
plt.ylabel('Count')
plt.xlabel(r'$log_{10}$(|$\phi$" |)')
plt.legend(loc = 'upper left')
plt.yscale('log')
plt.axvline(-4, ls = '--', color = 'r')

plt.subplot(2, 3, 6)
plt.hist(log_second_deriv_v3[1], edgecolor='black', rwidth=0.8, bins = bins[1], label = f'Ratio: {np.round(v3_ratios[1], 5)}')
plt.title('v3 phi')
plt.ylabel('Count')
plt.xlabel(r'$log_{10}$(|$\phi$" |)')
plt.legend(loc = 'upper left')
plt.yscale('log')
plt.axvline(-4, ls = '--', color = 'r')

ranges = [[-11, 1], [-12, -1]]
vbound = (-4 - ranges[1][0])/(ranges[1][1]-ranges[1][0])
hbound = (-3 - ranges[0][0])/(ranges[0][1]-ranges[0][0])
plt.figure(figsize = (17, 5))
plt.subplot(1, 3, 1)
plt.hist2d(log_second_deriv_rand[0],log_second_deriv_rand[1],range = ranges, bins = 25, norm=mpl.colors.LogNorm())
plt.title('Random')
plt.xlabel(r'$log_{10}$(|z"|)')
plt.ylabel(r'$log_{10}$(|$\phi$" |)')
plt.axvline(-3, 0, vbound, ls = '--', color = 'r')
plt.axhline(-4, 0, hbound, ls = '--', color = 'r')
plt.text(ranges[0][0]+0.5, ranges[1][1]-0.7, f'Ratio: {np.round(rand_ratios[2], 5)}', bbox=dict(boxstyle="square", fc = 'None'))

plt.subplot(1, 3, 2)
plt.hist2d(log_second_deriv_v2[0],log_second_deriv_v2[1],range = ranges, bins = 25, norm=mpl.colors.LogNorm())
plt.title('v2')
plt.xlabel(r'$log_{10}$(|z"|)')
plt.ylabel(r'$log_{10}$(|$\phi$" |)')
plt.axvline(-3, 0, vbound, ls = '--', color = 'r')
plt.axhline(-4, 0, hbound, ls = '--', color = 'r')
plt.text(ranges[0][0]+0.5, ranges[1][1]-0.7, f'Ratio: {np.round(v2_ratios[2], 7)}', bbox=dict(boxstyle="square", fc = 'None'))

plt.subplot(1, 3, 3)
plt.hist2d(log_second_deriv_v3[0],log_second_deriv_v3[1], range = ranges, bins = 25, norm=mpl.colors.LogNorm())
plt.title('v3')
plt.xlabel(r'$log_{10}$(|z"|)')
plt.ylabel(r'$log_{10}$(|$\phi$" |)')
plt.colorbar()
plt.axvline(-3, 0, vbound, ls = '--', color = 'r')
plt.axhline(-4, 0, hbound, ls = '--', color = 'r')
plt.text(ranges[0][0]+0.5, ranges[1][1]-0.7, f'Ratio: {np.round(v3_ratios[2], 7)}', bbox=dict(boxstyle="square", fc = 'None'))

plt.show()
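
second_deriv above is the standard three-point (Lagrange) second-derivative formula for samples taken at radii 5, 10 and 15 cm, so for any exact parabola through those radii it returns the curvature 2a exactly; the script's cutoffs at 10^-3 (z) and 10^-4 (phi) then separate low-curvature triplets from the rest. A quick numerical check under that assumption about the hard-coded radii (the parabola coefficients below are arbitrary, not from the repo):

import numpy as np

# sample an arbitrary parabola z(r) = a*r^2 + b*r + c at r = 5, 10, 15 cm
a, b, c = 0.7, -1.3, 4.2
r = np.array([5.0, 10.0, 15.0])
z = a*r**2 + b*r + c
triplets = np.array([[z]])   # same (coordinate, triplet, point) layout as create_triplets

term1 = triplets[:, :, 2]/(10-15)/(5-15)   # point at r = 15
term2 = triplets[:, :, 1]/(15-10)/(5-10)   # point at r = 10
term3 = triplets[:, :, 0]/(15-5)/(10-5)    # point at r = 5
print(2*(term1 + term2 + term3))           # -> [[1.4]], i.e. 2*a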

wedgecover.py (+17 −21)

@@ -129,10 +129,6 @@ def solve(self, lining:str = "SolveS", z0=0, n = 16, nlines:int=100, show = True
            self.solveS_reverse(z0=z0, stop = -1, n = n)
            return
 
-        elif lining == "solveS_center1":
-            self.solveS_center1(n = n)
-            return
-
        elif lining == "solveS_center2":
            try:
                for s in z0:
@@ -204,14 +200,16 @@ def S_loop15(self, z0 = 0, stop = 1, n = 16):
        Args:
            z0 (num, optional): Places to generate patch. Defaults to 0.
            stop (num, optional): stopping location, normalized to 1m. Defaults to 1.
-            n (int, optional): points per layer per patch. Defaults to 16.
+            n (int, optional): points per patch per layer. Defaults to 16.
 
        Returns:
            function: reruns loop if it hasn't reached the end of the dataset
        """
 
        #count how many times this has been run
        loops = self.n_patches - 1
+        #reads last patch made
+        last_patch = self.patches[loops].superpoints
        #create list for points closest to starting line and patch ingredients
        mins = []
        patch_ingredients = []
@@ -221,8 +219,6 @@ def S_loop15(self, z0 = 0, stop = 1, n = 16):
        #loops through layers
        for i in range(5):
            y = 5*(i+1)
-            #reads last patch made
-            last_patch = self.patches[loops].superpoints
            #create compatible arrays from data structure
            row_data = last_patch[i].points
            row_list = np.array([row_data[x].z for x in range(len(row_data))])
@@ -234,11 +230,11 @@ def S_loop15(self, z0 = 0, stop = 1, n = 16):
        min_index = np.argmin(np.array(mins))
        min_value = (last_patch[min_index].points[n-1].z-z0)/(5*(min_index+1)/100)
 
+        #row_data[layer] gives spacepoints in layer
+        row_data = self.data.array
        #loops through layers again
        for i in range(5):
            y = 5*(i+1)
-            #create compatible array from data structure
-            row_data = self.data.array
            row_list = np.array([row_data[i][x].z for x in range(len(row_data[i]))])
            #finds point closest to line from (z0, 0) to leftmost rescaled point
            closest_index = np.argmin(np.abs((row_list-z0)/(y/100) - min_value))
@@ -267,17 +263,16 @@ def S_loop15(self, z0 = 0, stop = 1, n = 16):
            #closest_index - 1 insures point is to left of line ie ensuring patches overlap
            patch_ingredients.append(wedgeSuperPoint(row_data[i][closest_index-1:closest_index + n - 1]))
 
-        #creates new patch
+        #add superpoints to patch
        new_patch = wedgePatch(self.env, tuple(patch_ingredients))
-
-        #if all layers have points beyond stop index, add patch and stop
+        #add patch to cover
+        self.add_patch(new_patch)
+
+        #if all layers have points beyond stop index, stop
        if term == 5:
-            self.add_patch(new_patch)
            return
-
-        #if new patches are still being created, add patch to cover instance and repeat loop
+        #if new patches are still being created, repeat loop
        else:
-            self.add_patch(new_patch)
            return self.S_loop15(z0, stop, n = n)
 
    def S_rloop15(self, z0 = 0, stop = -1, n = 16):
@@ -367,29 +362,30 @@ def solveS(self, z0 = 0, stop = 1, n = 16):
        Args:
            z0 (num, optional): Places to generate patch. Defaults to 0.
            stop (num, optional): stopping location, normalized to 1m. Defaults to 1.
-            n (int, optional): points per layer per patch. Defaults to 16.
+            n (int, optional): points per patch per layer. Defaults to 16.
 
        Returns:
            function: runs loop to make patches
        """
        #create list for inital patch
        init_patch = []
 
+        #row_data[layer] contains spacepoints for each layer
+        row_data = self.data.array
        #loops through each layer and picks n points closest to (z0, 0) and (-100, 25)
        for row in range(5):
            y = 5*(row + 1)
            #create compatible arrays from data structure
-            row_data = self.data.array
            row_list = np.array([row_data[row][x].z for x in range(len(row_data[row]))])
-            #picks picks n points closest to (z0, 0) and (-100, 25)
+            #picks picks n points closest to line from (z0, 0) to (-100, 25)
            start_index = np.argmin(np.abs(row_list - (((-z0-100)*y)/25+z0)))
            #subtract one from stop index in case it is right of the line from (z0, 0) to (-100, 25)
            if start_index != 0:
                start_index -= 1
-            #add superpoint
+            #add superpoint to patch
            init_patch.append(wedgeSuperPoint(row_data[row][start_index:start_index+n]))
 
-        #add to patch
+        #add patch to cover
        self.add_patch(wedgePatch(self.env, tuple(init_patch)))
 
        #run main algorithm
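
solveS seeds its first patch by taking, in each layer at radius y, the n points whose z values are closest to the reference line from (z0, 0) to (-100, 25), stepping one index left in case the closest point sits to the right of that line; S_loop15 then repeats the idea starting from the right edge of the previous patch so consecutive patches overlap. A small sketch of that start-index selection, using a hypothetical sorted array of spacepoint z values in place of the repo's data structures (z0, y and the point count are example numbers):

import numpy as np

# hypothetical layer: 150 spacepoint z values at radius y (cm)
z0, y, n = 0.0, 10.0, 16
row_z = np.sort(np.random.uniform(-100, 100, 150))

target = ((-z0 - 100) * y) / 25 + z0               # z of the line from (z0, 0) to (-100, 25) at radius y
start_index = np.argmin(np.abs(row_z - target))    # point closest to that line
if start_index != 0:
    start_index -= 1                               # step left in case the closest point is right of the line
superpoint_z = row_z[start_index:start_index + n]  # n points in this layer for the initial patch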
