swp2.py
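
"""
Parse theta output files (one estimate per number of breakpoints), build
population-size trajectories, likelihood and AIC summaries from the SFS, and
draw the corresponding figures. Plot data are written to / reloaded from JSON
so the figures can be rebuilt without re-parsing the theta files.
"""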

import matplotlib.pyplot as plt
import os
import sys
import numpy as np
import math
import json
def log_facto(k):
    """
    Return log(k!), computed exactly for small k and with
    Stirling's approximation for large k (k > 1e6).
    """
    k = int(k)
    if k > 1e6:
        return k * np.log(k) - k + np.log(2*math.pi*k)/2
    val = 0
    for i in range(2, k+1):
        val += np.log(i)
    return val
def parse_stwp_theta_file(stwp_theta_file, breaks, mu, tgen, relative_theta_scale = False):
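    """
    Parse a single theta file and keep the entry whose dimension matches
    breaks+1 (i.e. the run with `breaks` breakpoints).

    Returns (x, y, likelihood, thetas, sfs, L), where x/y describe the size
    trajectory, thetas maps group index to [theta, bins, likelihood], sfs is
    the observed SFS and L the number of sites. Six zeros are returned when
    the file has no entry with the requested dimension. Times are converted
    with tgen and mu unless relative_theta_scale is True.
    """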
    with open(stwp_theta_file, "r") as swp_file:
        # Read the first line
        line = swp_file.readline()
        L = float(line.split()[2])
        rands = swp_file.readline()
        line = swp_file.readline()
        # skip empty lines before SFS
        while line == "\n":
            line = swp_file.readline()
        sfs = np.array(line.split()).astype(float)
        # Process lines until the end of the file
        while line:
            # check at each line
            if line.startswith("dim"):
                dim = int(line.split()[1])
                if dim == breaks+1:
                    likelihood = line.split()[5]
                    groups = line.split()[6:6+dim]
                    theta_site = line.split()[6+dim:6+dim+1+dim]
                elif dim < breaks+1:
                    line = swp_file.readline()
                    continue
                elif dim > breaks+1:
                    break
                    #return 0,0,0
            # Read the next line
            line = swp_file.readline()
    #### END of parsing
    # quit this file if the number of dimensions is incorrect
    if dim < breaks+1:
        return 0,0,0,0,0,0
    # get n, the last bin of the last group
    # revert the list of groups as the most recent times correspond
    # to the closest and last leafs of the coal. tree.
    groups = groups[::-1]
    theta_site = theta_site[::-1]
    # store thetas for later use
    grps = groups.copy()
    thetas = {}
    for i in range(len(groups)):
        grps[i] = grps[i].split(',')
        thetas[i] = [float(theta_site[i]), grps[i], likelihood]
    # initiate the dict of times
    t = {}
    # list of thetas
    theta_L = []
    sum_t = 0
    for group_nb, group in enumerate(groups):
        ###print(group_nb, group, theta_site[group_nb], len(theta_site))
        # store all the thetas one by one, with one theta per group
        theta_L.append(float(theta_site[group_nb]))
        # if the group is of size 1
        if len(group.split(',')) == 1:
            i = int(group)
        # if the group size is >1, take the first elem of the group
        # i is the first bin of each group, straight after a breakpoint
        else:
            i = int(group.split(",")[0])
            j = int(group.split(",")[-1])
        t[i] = 0
        if len(group.split(',')) == 1:
            k = i
            if relative_theta_scale:
                t[i] += ((theta_L[group_nb]) / (k*(k-1)))
            else:
                t[i] += ((theta_L[group_nb]) / (k*(k-1)) * tgen) / mu
        else:
            for k in range(j, i-1, -1):
                if relative_theta_scale:
                    t[i] += ((theta_L[group_nb]) / (k*(k-1)))
                else:
                    t[i] += ((theta_L[group_nb]) / (k*(k-1)) * tgen) / mu
        # we add the cumulative times at the end
        t[i] += sum_t
        sum_t = t[i]
    # build the y axis (sizes)
    y = []
    for theta in theta_L:
        if relative_theta_scale:
            size = theta
        else:
            # with size N = theta/4mu
            size = theta / (4*mu)
        y.append(size)
        y.append(size)
    # build the time x axis
    x = [0]
    for time in range(0, len(t.values())-1):
        x.append(list(t.values())[time])
        x.append(list(t.values())[time])
    x.append(list(t.values())[len(t.values())-1])
    return x,y,likelihood,thetas,sfs,L
def plot_straight_x_y(x,y):
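    """
    Duplicate coordinates so that (x, y) points plot as a step function.

    A hand-traced example:
        plot_straight_x_y([0, 1, 2], [10, 20, 30])
        -> ([0, 0, 0, 1, 1, 2], [10, 10, 20, 20, 30, 30])
    """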
    x_1 = [x[0]]
    y_1 = []
    for i in range(0, len(y)-1):
        x_1.append(x[i])
        x_1.append(x[i])
        y_1.append(y[i])
        y_1.append(y[i])
    y_1 = y_1+[y[-1],y[-1]]
    x_1.append(x[-1])
    return x_1, y_1
def plot_all_epochs_thetafolder_old(folder_path, mu, tgen, title = "Title",
                                    theta_scale = True, ax = None, input = None, output = None):
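    """
    Scan a folder of theta files and build all summary plots in one pass.
    Kept as the older, self-contained variant; the current pipeline uses
    save_all_epochs_thetafolder() followed by plot_all_epochs_thetafolder().
    """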
    #scenari = {}
    cpt = 0
    epochs = {}
    for file_name in os.listdir(folder_path):
        breaks = 0
        cpt +=1
        if os.path.isfile(os.path.join(folder_path, file_name)):
            x, y, likelihood, theta, sfs, L = parse_stwp_theta_file(folder_path+file_name, breaks = breaks,
                                                                    tgen = tgen,
                                                                    mu = mu, relative_theta_scale = theta_scale)
            SFS_stored = sfs
            L_stored = L
            while not (x == 0 and y == 0):
                if breaks not in epochs.keys():
                    epochs[breaks] = {}
                epochs[breaks][likelihood] = x,y
                breaks += 1
                x,y,likelihood,theta,sfs,L = parse_stwp_theta_file(folder_path+file_name, breaks = breaks,
                                                                   tgen = tgen,
                                                                   mu = mu, relative_theta_scale = theta_scale)
                if x == 0:
                    # last break did not work, then breaks = breaks-1
                    breaks -= 1
    print("\n*******\n"+title+"\n--------\n"+"mu="+str(mu)+"\ntgen="+str(tgen)+"\nbreaks="+str(breaks)+"\n*******\n")
    print(cpt, "theta file(s) have been scanned.")
    my_dpi = 300
    if ax is None:
        # initialize figure
        my_dpi = 300
        fnt_size = 18
        # plt.rcParams['font.size'] = fnt_size
        fig, ax1 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
    else:
        fnt_size = 12
        # plt.rcParams['font.size'] = fnt_size
        ax1 = ax[1][0,0]
    ax1.set_yscale('log')
    ax1.set_xscale('log')
    ax1.grid(True, which="both", linestyle='--', alpha = 0.3)
    brkpt_lik = []
    top_plots = {}
    for epoch, scenari in epochs.items():
        # sort starting by the smallest -log(Likelihood)
        best10_scenari = (sorted(list(scenari.keys())))[:10]
        greatest_likelihood = best10_scenari[0]
        # store the tuple breakpoints and likelihood for later plot
        brkpt_lik.append((epoch, greatest_likelihood))
        x, y = scenari[greatest_likelihood]
        # without breakpoint
        if epoch == 0:
            # keep the theta without breakpoints as the N0 reference
            N0 = y[0]
            #continue
        for i in range(len(y)):
            # divide by N0
            y[i] = y[i]/N0
            x[i] = x[i]/N0
        top_plots[greatest_likelihood] = x,y,epoch
    plots_likelihoods = list(top_plots.keys())
    for i in range(len(plots_likelihoods)):
        plots_likelihoods[i] = float(plots_likelihoods[i])
    best10_plots = sorted(plots_likelihoods)[:10]
    top_plot_lik = str(best10_plots[0])
    plot_handles = []
    # plt.rcParams['font.size'] = fnt_size
    p0, = ax1.plot(top_plots[top_plot_lik][0], top_plots[top_plot_lik][1], 'o', linestyle = "-",
                   alpha=1, lw=2, label = str(top_plots[top_plot_lik][2])+' brks | Lik='+top_plot_lik)
    plot_handles.append(p0)
    for k, plot_Lk in enumerate(best10_plots[1:]):
        plot_Lk = str(plot_Lk)
        # plt.rcParams['font.size'] = fnt_size
        p, = ax1.plot(top_plots[plot_Lk][0], top_plots[plot_Lk][1], 'o', linestyle = "--",
                      alpha=1/(k+1), lw=1.5, label = str(top_plots[plot_Lk][2])+' brks | Lik='+plot_Lk)
        plot_handles.append(p)
    if theta_scale:
        ax1.set_xlabel("Coal. time", fontsize=fnt_size)
        ax1.set_ylabel("Pop. size scaled by N0", fontsize=fnt_size)
        # recent_scale_lower_bound = 0.01
        # recent_scale_upper_bound = 0.1
        # ax1.axvline(x=recent_scale_lower_bound)
        # ax1.axvline(x=recent_scale_upper_bound)
    else:
        # years
        ax1.set_xlabel("Time (years)", fontsize=fnt_size)
        ax1.set_ylabel("Individuals (N)", fontsize=fnt_size)
    # plt.rcParams['font.size'] = fnt_size
    # print(fnt_size, "rcParam font.size=", plt.rcParams['font.size'])
    ax1.legend(handles = plot_handles, loc='best', fontsize = fnt_size*0.5)
    ax1.set_title(title)
    if ax is None:
        plt.savefig(title+'_b'+str(breaks)+'.pdf')
    # plot likelihood against nb of breakpoints
    # best possible likelihood from SFS
    # Segregating sites
    S = sum(SFS_stored)
    # Number of kept sites from which the SFS is computed
    L = L_stored
    # number of monomorphic sites
    S0 = L-S
    # print("SFS", SFS_stored)
    # print("S", S, "L", L, "S0=", S0)
    # compute Ln
    Ln = log_facto(S+S0) - log_facto(S0) + np.log(float(S0)/(S+S0)) * S0
    for xi in range(0, len(SFS_stored)):
        p_i = SFS_stored[xi] / float(S+S0)
        Ln += np.log(p_i) * SFS_stored[xi] - log_facto(SFS_stored[xi])
    # basic plot likelihood
    if ax is None:
        fig, ax2 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
        # plt.rcParams['font.size'] = fnt_size
    else:
        # plt.rcParams['font.size'] = fnt_size
        ax2 = ax[0][0,1]
    ax2.plot(np.array(brkpt_lik)[:, 0], np.array(brkpt_lik)[:, 1].astype(float), 'o', linestyle = "dotted", lw=2)
    ax2.axhline(y=-Ln, linestyle = "-.", color = "red", label = r"$-\log\mathcal{L}$ = "+str(round(-Ln, 2)))
    ax2.set_yscale('log')
    ax2.set_xlabel("# breakpoints", fontsize=fnt_size)
    ax2.set_ylabel(r"$-\log\mathcal{L}$", fontsize=fnt_size)
    ax2.legend(loc='best', fontsize = fnt_size*0.5)
    ax2.set_title(title+" Likelihood gain from # breakpoints")
    if ax is None:
        plt.savefig(title+'_Breakpts_Likelihood.pdf')
    # AIC
    if ax is None:
        fig, ax3 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
        # plt.rcParams['font.size'] = '18'
    else:
        # plt.rcParams['font.size'] = fnt_size
        ax3 = ax[1][0,1]
    AIC = []
    for brk in np.array(brkpt_lik)[:, 0]:
        brk = int(brk)
        AIC.append((2*brk+1)+2*np.array(brkpt_lik)[brk, 1].astype(float))
    ax3.plot(np.array(brkpt_lik)[:, 0], AIC, 'o', linestyle = "dotted", lw=2)
    # AIC = 2*k - 2ln(L) ; where k is the number of parameters, here brks+1
    AIC_ln = 2*(len(brkpt_lik)+1) - 2*Ln
    ax3.axhline(y=AIC_ln, linestyle = "-.", color = "red",
                label = "Min. AIC = "+str(round(AIC_ln, 2)))
    selected_brks_nb = AIC.index(min(AIC))
    ax3.set_yscale('log')
    ax3.set_xlabel("# breakpoints", fontsize=fnt_size)
    ax3.set_ylabel("AIC")
    ax3.legend(loc='best', fontsize = fnt_size*0.5)
    ax3.set_title(title+" AIC")
    if ax is None:
        plt.savefig(title+'_Breakpts_Likelihood_AIC.pdf')
    print("S", S)
    # return plots
    if ax is not None:
        return ax[0], ax[1]
def plot_all_epochs_thetafolder(full_dict, mu, tgen, title = "Title",
                                theta_scale = True, ax = None, input = None, output = None):
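    """
    Plot the summary figures from the dictionary saved by
    save_all_epochs_thetafolder(): expects the keys 'all_epochs' (with
    'best' and 'plots'), 'Ln_Brks', 'best_Ln', 'AIC_Brks' and 'best_AIC'.
    """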
    my_dpi = 300
    if ax is None:
        # initialize figure
        my_dpi = 300
        fnt_size = 18
        # plt.rcParams['font.size'] = fnt_size
        fig, ax1 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
    else:
        fnt_size = 12
        # plt.rcParams['font.size'] = fnt_size
        ax1 = ax[1][0,0]
    ax1.set_yscale('log')
    ax1.set_xscale('log')
    ax1.grid(True, which="both", linestyle='--', alpha = 0.3)
    plot_handles = []
    best_plot = full_dict['all_epochs']['best']
    p0, = ax1.plot(best_plot[0], best_plot[1], 'o', linestyle = "-",
                   alpha=1, lw=2, label = str(best_plot[2])+' brks | Lik='+best_plot[3])
    plot_handles.append(p0)
    for k, plot_Lk in enumerate(full_dict['all_epochs']['plots']):
        plot_Lk = str(full_dict['all_epochs']['plots'][k][3])
        # plt.rcParams['font.size'] = fnt_size
        p, = ax1.plot(full_dict['all_epochs']['plots'][k][0], full_dict['all_epochs']['plots'][k][1], 'o', linestyle = "--",
                      alpha=1/(k+1), lw=1.5, label = str(full_dict['all_epochs']['plots'][k][2])+' brks | Lik='+plot_Lk)
        plot_handles.append(p)
    if theta_scale:
        ax1.set_xlabel("Coal. time", fontsize=fnt_size)
        ax1.set_ylabel("Pop. size scaled by N0", fontsize=fnt_size)
        # recent_scale_lower_bound = 0.01
        # recent_scale_upper_bound = 0.1
        # ax1.axvline(x=recent_scale_lower_bound)
        # ax1.axvline(x=recent_scale_upper_bound)
    else:
        # years
        ax1.set_xlabel("Time (years)", fontsize=fnt_size)
        ax1.set_ylabel("Individuals (N)", fontsize=fnt_size)
    # plt.rcParams['font.size'] = fnt_size
    # print(fnt_size, "rcParam font.size=", plt.rcParams['font.size'])
    ax1.legend(handles = plot_handles, loc='best', fontsize = fnt_size*0.5)
    ax1.set_title(title)
    if ax is None:
        # best_plot[2] holds the number of breakpoints of the best model
        plt.savefig(title+'_b'+best_plot[2]+'.pdf')
    # plot likelihood against nb of breakpoints
    if ax is None:
        fig, ax2 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
        # plt.rcParams['font.size'] = fnt_size
    else:
        # plt.rcParams['font.size'] = fnt_size
        ax2 = ax[0][0,1]
    ax2.plot(full_dict['Ln_Brks'][0], full_dict['Ln_Brks'][1], 'o', linestyle = "dotted", lw=2)
    ax2.axhline(y=full_dict['best_Ln'], linestyle = "-.", color = "red", label = r"$-\log\mathcal{L}$ = "+str(round(full_dict['best_Ln'], 2)))
    ax2.set_yscale('log')
    ax2.set_xlabel("# breakpoints", fontsize=fnt_size)
    ax2.set_ylabel(r"$-\log\mathcal{L}$", fontsize=fnt_size)
    ax2.legend(loc='best', fontsize = fnt_size*0.5)
    ax2.set_title(title+" Likelihood gain from # breakpoints")
    if ax is None:
        plt.savefig(title+'_Breakpts_Likelihood.pdf')
    # AIC
    if ax is None:
        fig, ax3 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
        # plt.rcParams['font.size'] = '18'
    else:
        # plt.rcParams['font.size'] = fnt_size
        ax3 = ax[1][0,1]
    AIC = full_dict['AIC_Brks']
    ax3.plot(AIC[0], AIC[1], 'o', linestyle = "dotted", lw=2)
    ax3.axhline(y=full_dict['best_AIC'], linestyle = "-.", color = "red",
                label = "Min. AIC = "+str(round(full_dict['best_AIC'], 2)))
    ax3.set_yscale('log')
    ax3.set_xlabel("# breakpoints", fontsize=fnt_size)
    ax3.set_ylabel("AIC")
    ax3.legend(loc='best', fontsize = fnt_size*0.5)
    ax3.set_title(title+" AIC")
    if ax is None:
        plt.savefig(title+'_Breakpts_Likelihood_AIC.pdf')
    # return plots
    if ax is not None:
        return ax[0], ax[1]
def save_all_epochs_thetafolder(folder_path, mu, tgen, title = "Title", theta_scale = True, input = None, output = None):
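    """
    Scan all theta files in folder_path, keep the best (smallest -log L)
    scenario per number of breakpoints, compute the SFS-based likelihood
    reference and AIC values, and write everything to a JSON file
    (merged into `input` if given) for later plotting.
    """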
    #scenari = {}
    cpt = 0
    epochs = {}
    plots = {}
    # store ['best'], and [0] for epoch 0 etc...
    for file_name in os.listdir(folder_path):
        breaks = 0
        cpt +=1
        if os.path.isfile(os.path.join(folder_path, file_name)):
            x, y, likelihood, theta, sfs, L = parse_stwp_theta_file(folder_path+file_name, breaks = breaks,
                                                                    tgen = tgen,
                                                                    mu = mu, relative_theta_scale = theta_scale)
            SFS_stored = sfs
            L_stored = L
            while not (x == 0 and y == 0):
                if breaks not in epochs.keys():
                    epochs[breaks] = {}
                epochs[breaks][likelihood] = x,y
                breaks += 1
                x,y,likelihood,theta,sfs,L = parse_stwp_theta_file(folder_path+file_name, breaks = breaks,
                                                                   tgen = tgen,
                                                                   mu = mu, relative_theta_scale = theta_scale)
                if x == 0:
                    # last break did not work, then breaks = breaks-1
                    breaks -= 1
    print("\n*******\n"+title+"\n--------\n"+"mu="+str(mu)+"\ntgen="+str(tgen)+"\nbreaks="+str(breaks)+"\n*******\n")
    print(cpt, "theta file(s) have been scanned.")
    brkpt_lik = []
    top_plots = {}
    for epoch, scenari in epochs.items():
        # sort starting by the smallest -log(Likelihood)
        best10_scenari = (sorted(list(scenari.keys())))[:10]
        greatest_likelihood = best10_scenari[0]
        # store the tuple breakpoints and likelihood for later plot
        brkpt_lik.append((epoch, greatest_likelihood))
        x, y = scenari[greatest_likelihood]
        # without breakpoint
        if epoch == 0:
            # keep the theta without breakpoints as the N0 reference
            N0 = y[0]
            #continue
        for i in range(len(y)):
            # divide by N0
            y[i] = y[i]/N0
            x[i] = x[i]/N0
        top_plots[greatest_likelihood] = x,y,epoch
    plots_likelihoods = list(top_plots.keys())
    for i in range(len(plots_likelihoods)):
        plots_likelihoods[i] = float(plots_likelihoods[i])
    best10_plots = sorted(plots_likelihoods)[:10]
    top_plot_lik = str(best10_plots[0])
    # store x,y,brks,likelihood
    plots['best'] = (top_plots[top_plot_lik][0], top_plots[top_plot_lik][1], str(top_plots[top_plot_lik][2]), top_plot_lik)
    plots['plots'] = []
    for k, plot_Lk in enumerate(best10_plots[1:]):
        plot_Lk = str(plot_Lk)
        plots['plots'].append([top_plots[plot_Lk][0], top_plots[plot_Lk][1], str(top_plots[plot_Lk][2]), plot_Lk])
    # plot likelihood against nb of breakpoints
    # best possible likelihood from SFS
    # Segregating sites
    S = sum(SFS_stored)
    # Number of kept sites from which the SFS is computed
    L = L_stored
    # number of monomorphic sites
    S0 = L-S
    # print("SFS", SFS_stored)
    # print("S", S, "L", L, "S0=", S0)
    # compute Ln
    Ln = log_facto(S+S0) - log_facto(S0) + np.log(float(S0)/(S+S0)) * S0
    for xi in range(0, len(SFS_stored)):
        p_i = SFS_stored[xi] / float(S+S0)
        Ln += np.log(p_i) * SFS_stored[xi] - log_facto(SFS_stored[xi])
    # basic plot likelihood
    Ln_Brks = [list(np.array(brkpt_lik)[:, 0]), list(np.array(brkpt_lik)[:, 1].astype(float))]
    best_Ln = -Ln
    AIC = []
    for brk in np.array(brkpt_lik)[:, 0]:
        brk = int(brk)
        AIC.append((2*brk+1)+2*np.array(brkpt_lik)[brk, 1].astype(float))
    AIC_Brks = [list(np.array(brkpt_lik)[:, 0]), AIC]
    # AIC = 2*k - 2ln(L) ; where k is the number of parameters, here brks+1
    AIC_ln = 2*(len(brkpt_lik)+1) - 2*Ln
    best_AIC = AIC_ln
    # to return : plots ; Ln_Brks ; AIC_Brks ; best_Ln ; best_AIC
    # 'plots' dict keys: 'best', {epochs}('0', '1',...)
    if input is None:
        saved_plots = {"all_epochs":plots, "Ln_Brks":Ln_Brks,
                       "AIC_Brks":AIC_Brks, "best_Ln":best_Ln,
                       "best_AIC":best_AIC}
    else:
        # if the dict has to be loaded from input
        with open(input, 'r') as json_file:
            saved_plots = json.load(json_file)
        saved_plots["all_epochs"] = plots
        saved_plots["Ln_Brks"] = Ln_Brks
        saved_plots["AIC_Brks"] = AIC_Brks
        saved_plots["best_Ln"] = best_Ln
        saved_plots["best_AIC"] = best_AIC
    if output is None:
        output = title+"_plotdata.json"
    with open(output, 'w') as json_file:
        json.dump(saved_plots, json_file)
    return saved_plots
def save_k_theta(folder_path, mu, tgen, title = "Title", theta_scale = True,
                 breaks_max = 10, input = None, output = None):
  459. """
  460. Save theta values as is to do basic plots.
  461. """
    cpt = 0
    epochs = {}
    len_sfs = 0
    for file_name in os.listdir(folder_path):
        cpt +=1
        if os.path.isfile(os.path.join(folder_path, file_name)):
            for k in range(breaks_max):
                x,y,likelihood,thetas,sfs,L = parse_stwp_theta_file(folder_path+file_name, breaks = k,
                                                                    tgen = tgen,
                                                                    mu = mu, relative_theta_scale = theta_scale)
                if thetas == 0:
                    continue
                if len(thetas)-1 != k:
                    continue
                if k not in epochs.keys():
                    epochs[k] = {}
                likelihood = str(eval(thetas[k][2]))
                epochs[k][likelihood] = thetas
                #epochs[k] = thetas
    print("\n*******\n"+title+"\n--------\n"+"mu="+str(mu)+"\ntgen="+str(tgen)+"\nbreaks="+str(k)+"\n*******\n")
    print(cpt, "theta file(s) have been scanned.")
    plots = []
    best_epochs = {}
    for epoch in epochs:
        likelihoods = []
        for key in epochs[epoch].keys():
            likelihoods.append(key)
        likelihoods.sort()
        minLogLn = str(likelihoods[0])
        best_epochs[epoch] = epochs[epoch][minLogLn]
    for epoch, theta in best_epochs.items():
        groups = np.array(list(theta.values()), dtype=object)[:, 1].tolist()
        x = []
        y = []
        thetas = np.array(list(theta.values()), dtype=object)[:, 0]
        for i,group in enumerate(groups):
            x += group[::-1]
            y += list(np.repeat(thetas[i], len(group)))
        if epoch == 0:
            N0 = y[0]
        # compute the proportion of information used at each bin of the SFS
        sum_theta_i = 0
        for i in range(2, len(y)+2):
            sum_theta_i += y[i-2] / (i-1)
        prop = []
        for k in range(2, len(y)+2):
            prop.append(y[k-2] / (k - 1) / sum_theta_i)
        prop = prop[::-1]
        # normalise to N0 (N0 of epoch 1)
        for i in range(len(y)):
            y[i] = y[i]/N0
        # x_plot, y_plot = plot_straight_x_y(x, y)
        p = x, y
        # add plot to the list of all plots to superimpose
        plots.append(p)
        cumul = 0
        prop_cumul = []
        for val in prop:
            prop_cumul.append(val+cumul)
            cumul = val+cumul
        prop = prop_cumul
    lines_fig2 = []
    for epoch, theta in best_epochs.items():
        groups = np.array(list(theta.values()), dtype=object)[:, 1].tolist()
        x = []
        y = []
        thetas = np.array(list(theta.values()), dtype=object)[:, 0]
        for i,group in enumerate(groups):
            x += group[::-1]
            y += list(np.repeat(thetas[i], len(group)))
        if epoch == 0:
            N0 = y[0]
        for i in range(len(y)):
            y[i] = y[i]/N0
        x_2 = []
        T = 0
        for i in range(len(x)):
            x[i] = int(x[i])
        # compute the times as: theta_k / (k*(k-1))
        for i in range(0, len(x)):
            T += y[i] / (x[i]*(x[i]-1))
            x_2.append(T)
        # Save plotting (fig 2)
        x_2 = [0]+x_2
        y = [y[0]]+y
        # x2_plot, y2_plot = plot_straight_x_y(x_2, y)
        p2 = x_2, y
        lines_fig2.append(p2)
    if input is None:
        saved_plots = {"raw_stairs":plots, "scaled_stairs":lines_fig2,
                       "prop":prop}
    else:
        # if the dict has to be loaded from input
        with open(input, 'r') as json_file:
            saved_plots = json.load(json_file)
        saved_plots["raw_stairs"] = plots
        saved_plots["scaled_stairs"] = lines_fig2
        saved_plots["prop"] = prop
    if output is None:
        output = title+"_plotdata.json"
    with open(output, 'w') as json_file:
        json.dump(saved_plots, json_file)
    return saved_plots
def plot_scaled_theta(plot_lines, prop, title, ax = None, n_ticks = 10):
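    """
    Plot the rescaled staircases on linear (fig 2) and log (fig 3) axes.
    plot_lines is the 'scaled_stairs' list produced by save_k_theta().
    """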
    # fig 2 & 3
    if ax is None:
        my_dpi = 300
        fnt_size = 18
        fig2, ax2 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
        fig3, ax3 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
    else:
        # plt.rcParams['font.size'] = fnt_size
        fnt_size = 12
        # place of plots on the grid
        ax2 = ax[1,0]
        ax3 = ax[1,1]
    lines_fig2 = []
    lines_fig3 = []
    #plt.figure(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
    for epoch, plot in enumerate(plot_lines):
        x,y = plot
        x2_plot, y2_plot = plot_straight_x_y(x,y)
        p2, = ax2.plot(x2_plot, y2_plot, 'o', linestyle="-", alpha=0.75, lw=2, label = str(epoch)+' brks')
        lines_fig2.append(p2)
        # Plotting (fig 3) which is the same but log scale for x
        p3, = ax3.plot(x2_plot, y2_plot, 'o', linestyle="-", alpha=0.75, lw=2, label = str(epoch)+' brks')
        lines_fig3.append(p3)
    ax2.set_xlabel("Relative scale", fontsize=fnt_size)
    ax2.set_ylabel("theta", fontsize=fnt_size)
    ax2.set_title(title, fontsize=fnt_size)
    ax2.legend(handles=lines_fig2, loc='best', fontsize = fnt_size*0.5)
    if ax is None:
        # nb of plot_lines represents the number of epochs stored (len(plot_lines) = #breaks+1)
        # save from the figure handle so fig2 (not the current figure, fig3) is written
        fig2.savefig(title+'_plot2_'+str(len(plot_lines))+'.pdf')
        # close fig2 to save memory
        plt.close(fig2)
    ax3.set_xscale('log')
    ax3.set_yscale('log')
    ax3.set_xlabel("log Relative scale", fontsize=fnt_size)
    ax3.set_ylabel("theta", fontsize=fnt_size)
    ax3.set_title(title, fontsize=fnt_size)
    ax3.legend(handles=lines_fig3, loc='best', fontsize = fnt_size*0.5)
    if ax is None:
        # nb of plot_lines represents the number of epochs stored (len(plot_lines) = #breaks+1)
        fig3.savefig(title+'_plot3_'+str(len(plot_lines))+'_log.pdf')
        # close fig3 to save memory
        plt.close(fig3)
    return ax
def plot_raw_stairs(plot_lines, prop, title, ax = None, n_ticks = 10):
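    """
    Plot the raw theta staircases ('raw_stairs' from save_k_theta), labelling
    the x ticks with the bin number and the cumulative proportion of sites.
    """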
    # multiple fig
    if ax is None:
        # initialize figure 1
        my_dpi = 300
        fnt_size = 18
        # plt.rcParams['font.size'] = fnt_size
        fig, ax1 = plt.subplots(figsize=(5000/my_dpi, 2800/my_dpi), dpi=my_dpi)
    else:
        fnt_size = 12
        # plt.rcParams['font.size'] = fnt_size
        ax1 = ax[0, 0]
    plt.subplots_adjust(wspace=0.3, hspace=0.3)
    plots = []
    for epoch, plot in enumerate(plot_lines):
        x,y = plot
        x_plot, y_plot = plot_straight_x_y(x,y)
        p, = ax1.plot(x_plot, y_plot, 'o', linestyle="-", alpha=0.75, lw=2, label = str(epoch)+' brks')
        # add plot to the list of all plots to superimpose
        plots.append(p)
        x_ticks = x
    # print(x_ticks)
    # print(prop, "\n", sum(prop))
    # ax.legend(handles=[p0]+plots)
    ax1.set_xlabel("# bin & cumul. prop. of sites", fontsize=fnt_size)
    # Set the x-axis locator to reduce the number of ticks to 10
    ax1.set_ylabel("theta", fontsize=fnt_size)
    ax1.set_title(title, fontsize=fnt_size)
    ax1.legend(handles=plots, loc='best', fontsize = fnt_size*0.5)
    ax1.set_xticks(x_ticks)
    step = len(x_ticks)//(n_ticks-1)
    values = x_ticks[::step]
    new_prop = []
    for val in values:
        new_prop.append(prop[int(val)-2])
    new_prop = new_prop[::-1]
    ax1.set_xticks(values)
    ax1.set_xticklabels([f'{values[k]}\n{val:.2f}' for k, val in enumerate(new_prop)], fontsize = fnt_size*0.8)
    if ax is None:
        # nb of plot_lines represents the number of epochs stored (len(plot_lines) = #breaks+1)
        plt.savefig(title+'_raw'+str(len(plot_lines))+'.pdf')
        plt.close(fig)
    # return plots
    return ax
def combined_plot(folder_path, mu, tgen, breaks, title = "Title", theta_scale = True):
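    """
    Full pipeline for one folder of theta files: save the plot data to
    <title>_plotdata.json, then draw the combined two-page summary
    (<title>_combined_p1.pdf and _p2.pdf) plus the standalone figures.
    """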
    my_dpi = 300
    # # Add some extra space for the second axis at the bottom
    # #plt.rcParams['font.size'] = 18
    # fig, axs = plt.subplots(2, 2, figsize=(5000/my_dpi, 2970/my_dpi), dpi=my_dpi)
    # #plt.rcParams['font.size'] = 12
    # ax = plot_all_epochs_thetafolder(folder_path, mu, tgen, title, theta_scale, ax = axs)
    # ax = plot_test_theta(folder_path, mu, tgen, title, theta_scale, breaks_max = breaks, ax = axs)
    # # Adjust layout to prevent clipping of titles
    #
    # # Save the entire grid as a single figure
    # plt.savefig(title+'_combined.pdf')
    # plt.clf()
    # # # second call for individual plots
    # # plot_all_epochs_thetafolder(folder_path, mu, tgen, title, theta_scale, ax = None)
    # # plot_test_theta(folder_path, mu, tgen, title, theta_scale, breaks_max = breaks, ax = None)
    # # plt.clf()
    save_k_theta(folder_path, mu, tgen, title, theta_scale, breaks_max = breaks, output = title+"_plotdata.json")
    save_all_epochs_thetafolder(folder_path, mu, tgen, title, theta_scale, input = title+"_plotdata.json", output = title+"_plotdata.json")
    with open(title+"_plotdata.json", 'r') as json_file:
        loaded_data = json.load(json_file)
    # plot page 1 of summary
    fig1, ax1 = plt.subplots(2, 2, figsize=(5000/my_dpi, 2970/my_dpi), dpi=my_dpi)
    # fig1.tight_layout()
    # Adjust absolute space between the top and bottom rows
    fig1.subplots_adjust(hspace=0.35)  # Adjust this value based on your requirement
    # plot page 2 of summary
    fig2, ax2 = plt.subplots(2, 2, figsize=(5000/my_dpi, 2970/my_dpi), dpi=my_dpi)
    # fig2.tight_layout()
    ax1 = plot_raw_stairs(plot_lines = loaded_data['raw_stairs'],
                          prop = loaded_data['prop'], title = title, ax = ax1)
    ax1 = plot_scaled_theta(plot_lines = loaded_data['scaled_stairs'],
                            prop = loaded_data['prop'], title = title, ax = ax1)
    ax1, ax2 = plot_all_epochs_thetafolder(loaded_data, mu, tgen, title, theta_scale, ax = [ax1, ax2])
    fig1.savefig(title+'_combined_p1.pdf')
    fig2.savefig(title+'_combined_p2.pdf')
    plot_raw_stairs(plot_lines = loaded_data['raw_stairs'],
                    prop = loaded_data['prop'], title = title, ax = None)
    plot_scaled_theta(plot_lines = loaded_data['scaled_stairs'],
                      prop = loaded_data['prop'], title = title, ax = None)
    plt.close(fig1)
    plt.close(fig2)
if __name__ == "__main__":
    if len(sys.argv) != 4:
        print("Need 3 args: ThetaFolder MutationRate GenerationTime")
        exit(0)
    folder_path = sys.argv[1]
    mu = float(sys.argv[2])
    tgen = float(sys.argv[3])
    # the command-line entry point uses the folder-based plotting routine
    plot_all_epochs_thetafolder_old(folder_path, mu, tgen)
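
# Example invocation (hypothetical folder and values):
#   python swp2.py ./theta_files/ 1.25e-8 25
# i.e. a folder of theta files, a per-site mutation rate and a generation time in years.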