asigalov61 committed on
Commit 395f8ee
1 Parent(s): ffdd828

Upload 2 files

Files changed (2)
  1. TMIDIX.py +33 -8
  2. TPLOTS.py +1045 -0
TMIDIX.py CHANGED
@@ -7939,17 +7939,34 @@ def chord_to_pchord(chord):
 
   return pchord
 
+###################################################################################
+
 def summarize_escore_notes(escore_notes,
                            summary_length_in_chords=128,
-                           preserve_timings=True
+                           preserve_timings=True,
+                           preserve_durations=False,
+                           time_threshold=12,
+                           min_sum_chord_len=2,
+                           use_tones_chords=True
                            ):
 
   cscore = chordify_score([d[1:] for d in delta_score_notes(escore_notes)])
 
+  summary_length_in_chords = min(len(cscore), summary_length_in_chords)
+
+  ltthresh = time_threshold // 2
+  uttresh = time_threshold * 2
+
+  mc_time = Counter([c[0][0] for c in cscore if c[0][2] != 9 and ltthresh < c[0][0] < uttresh]).most_common()[0][0]
+
   pchords = []
 
   for c in cscore:
-    pchords.append(chord_to_pchord(c))
+    if use_tones_chords:
+      pchords.append([c[0][0]] + pitches_to_tones_chord(chord_to_pchord(c)))
+
+    else:
+      pchords.append([c[0][0]] + chord_to_pchord(c))
 
   step = round(len(pchords) / summary_length_in_chords)
 
@@ -7962,18 +7979,26 @@ def summarize_escore_notes(escore_notes,
 
   for i, s in enumerate(samples):
 
-    best_chord = list(Counter(s).most_common()[0][0])
+    best_chord = list([v[0] for v in Counter(s).most_common() if v[0][0] == mc_time and len(v[0]) > min_sum_chord_len])
 
-    chord = copy.deepcopy(cscore[[list(ss) for ss in s].index(best_chord)+(i*step)])
+    if not best_chord:
+      best_chord = list([v[0] for v in Counter(s).most_common() if len(v[0]) > min_sum_chord_len])
+
+      if not best_chord:
+        best_chord = list([Counter(s).most_common()[0][0]])
+
+    chord = copy.deepcopy(cscore[[ss for ss in s].index(best_chord[0])+(i*step)])
 
     if preserve_timings:
 
-      if i > 0:
+      if not preserve_durations:
+
+        if i > 0:
 
-        pchord = summarized_escore_notes[-1]
+          pchord = summarized_escore_notes[-1]
 
-        for c in pchord:
-          c[1] = max(c[1], chord[0][0])
+          for pc in pchord:
+            pc[1] = min(pc[1], chord[0][0])
 
     else:
 
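A minimal usage sketch of the updated summarize_escore_notes signature, for reference. It assumes the TMIDIX module is importable and that escore_notes is an enhanced-score note list already produced elsewhere with TMIDIX; the keyword values shown are simply the new defaults introduced in this commit, and summary is a hypothetical name for the returned chord-level result.

import TMIDIX

# escore_notes: an enhanced-score note list obtained elsewhere with TMIDIX (assumed available)
summary = TMIDIX.summarize_escore_notes(escore_notes,
                                        summary_length_in_chords=128,
                                        preserve_timings=True,
                                        preserve_durations=False,
                                        time_threshold=12,
                                        min_sum_chord_len=2,
                                        use_tones_chords=True)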
TPLOTS.py ADDED
@@ -0,0 +1,1045 @@
1
+ #! /usr/bin/python3
2
+
3
+ r'''############################################################################
4
+ ################################################################################
5
+ #
6
+ #
7
+ # Tegridy Plots Python Module (TPLOTS)
8
+ # Version 1.0
9
+ #
10
+ # Project Los Angeles
11
+ #
12
+ # Tegridy Code 2024
13
+ #
14
+ # https://github.com/asigalov61/tegridy-tools
15
+ #
16
+ #
17
+ ################################################################################
18
+ #
19
+ # Copyright 2024 Project Los Angeles / Tegridy Code
20
+ #
21
+ # Licensed under the Apache License, Version 2.0 (the "License");
22
+ # you may not use this file except in compliance with the License.
23
+ # You may obtain a copy of the License at
24
+ #
25
+ # http://www.apache.org/licenses/LICENSE-2.0
26
+ #
27
+ # Unless required by applicable law or agreed to in writing, software
28
+ # distributed under the License is distributed on an "AS IS" BASIS,
29
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
30
+ # See the License for the specific language governing permissions and
31
+ # limitations under the License.
32
+ #
33
+ ################################################################################
34
+ ################################################################################
35
+ #
36
+ # Critical dependencies
37
+ #
38
+ # !pip install numpy
39
+ # !pip install scipy
40
+ # !pip install matplotlib
41
+ # !pip install networkx[all]
42
+ # !pip3 install scikit-learn
43
+ #
44
+ ################################################################################
45
+ #
46
+ # Future critical dependencies
47
+ #
48
+ # !pip install umap-learn
49
+ # !pip install alphashape
50
+ #
51
+ ################################################################################
52
+ '''
53
+
54
+ ################################################################################
55
+ # Module imports
56
+ ################################################################################
57
+
58
+ import os
59
+ from collections import Counter
60
+ from itertools import groupby
61
+
62
+ import numpy as np
63
+
64
+ import networkx as nx
65
+
66
+ from sklearn.manifold import TSNE
67
+ from sklearn import metrics
68
+ from sklearn.preprocessing import MinMaxScaler
69
+ from sklearn.decomposition import PCA
70
+
71
+ from scipy.ndimage import zoom
72
+ from scipy.spatial import distance_matrix
73
+ from scipy.sparse.csgraph import minimum_spanning_tree
74
+ from scipy.stats import zscore
75
+
76
+ import matplotlib.pyplot as plt
77
+ from PIL import Image
78
+
79
+ ################################################################################
80
+ # Constants
81
+ ################################################################################
82
+
83
+ ALL_CHORDS_FILTERED = [[0], [0, 3], [0, 3, 5], [0, 3, 5, 8], [0, 3, 5, 9], [0, 3, 5, 10], [0, 3, 7],
84
+ [0, 3, 7, 10], [0, 3, 8], [0, 3, 9], [0, 3, 10], [0, 4], [0, 4, 6],
85
+ [0, 4, 6, 9], [0, 4, 6, 10], [0, 4, 7], [0, 4, 7, 10], [0, 4, 8], [0, 4, 9],
86
+ [0, 4, 10], [0, 5], [0, 5, 8], [0, 5, 9], [0, 5, 10], [0, 6], [0, 6, 9],
87
+ [0, 6, 10], [0, 7], [0, 7, 10], [0, 8], [0, 9], [0, 10], [1], [1, 4],
88
+ [1, 4, 6], [1, 4, 6, 9], [1, 4, 6, 10], [1, 4, 6, 11], [1, 4, 7],
89
+ [1, 4, 7, 10], [1, 4, 7, 11], [1, 4, 8], [1, 4, 8, 11], [1, 4, 9], [1, 4, 10],
90
+ [1, 4, 11], [1, 5], [1, 5, 8], [1, 5, 8, 11], [1, 5, 9], [1, 5, 10],
91
+ [1, 5, 11], [1, 6], [1, 6, 9], [1, 6, 10], [1, 6, 11], [1, 7], [1, 7, 10],
92
+ [1, 7, 11], [1, 8], [1, 8, 11], [1, 9], [1, 10], [1, 11], [2], [2, 5],
93
+ [2, 5, 8], [2, 5, 8, 11], [2, 5, 9], [2, 5, 10], [2, 5, 11], [2, 6], [2, 6, 9],
94
+ [2, 6, 10], [2, 6, 11], [2, 7], [2, 7, 10], [2, 7, 11], [2, 8], [2, 8, 11],
95
+ [2, 9], [2, 10], [2, 11], [3], [3, 5], [3, 5, 8], [3, 5, 8, 11], [3, 5, 9],
96
+ [3, 5, 10], [3, 5, 11], [3, 7], [3, 7, 10], [3, 7, 11], [3, 8], [3, 8, 11],
97
+ [3, 9], [3, 10], [3, 11], [4], [4, 6], [4, 6, 9], [4, 6, 10], [4, 6, 11],
98
+ [4, 7], [4, 7, 10], [4, 7, 11], [4, 8], [4, 8, 11], [4, 9], [4, 10], [4, 11],
99
+ [5], [5, 8], [5, 8, 11], [5, 9], [5, 10], [5, 11], [6], [6, 9], [6, 10],
100
+ [6, 11], [7], [7, 10], [7, 11], [8], [8, 11], [9], [10], [11]]
101
+
102
+ ################################################################################
103
+
104
+ CHORDS_TYPES = ['WHITE', 'BLACK', 'UNKNOWN', 'MIXED WHITE', 'MIXED BLACK', 'MIXED GRAY']
105
+
106
+ ################################################################################
107
+
108
+ WHITE_NOTES = [0, 2, 4, 5, 7, 9, 11]
109
+
110
+ ################################################################################
111
+
112
+ BLACK_NOTES = [1, 3, 6, 8, 10]
113
+
114
+ ################################################################################
115
+ # Helper functions
116
+ ################################################################################
117
+
118
+ def tones_chord_type(tones_chord,
119
+ return_chord_type_index=True,
120
+ ):
121
+
122
+ """
123
+ Returns tones chord type
124
+ """
125
+
126
+ WN = WHITE_NOTES
127
+ BN = BLACK_NOTES
128
+ MX = WHITE_NOTES + BLACK_NOTES
129
+
130
+
131
+ CHORDS = ALL_CHORDS_FILTERED
132
+
133
+ tones_chord = sorted(tones_chord)
134
+
135
+ ctype = 'UNKNOWN'
136
+
137
+ if tones_chord in CHORDS:
138
+
139
+ if sorted(set(tones_chord) & set(WN)) == tones_chord:
140
+ ctype = 'WHITE'
141
+
142
+ elif sorted(set(tones_chord) & set(BN)) == tones_chord:
143
+ ctype = 'BLACK'
144
+
145
+ if len(tones_chord) > 1 and sorted(set(tones_chord) & set(MX)) == tones_chord:
146
+
147
+ if len(sorted(set(tones_chord) & set(WN))) == len(sorted(set(tones_chord) & set(BN))):
148
+ ctype = 'MIXED GRAY'
149
+
150
+ elif len(sorted(set(tones_chord) & set(WN))) > len(sorted(set(tones_chord) & set(BN))):
151
+ ctype = 'MIXED WHITE'
152
+
153
+ elif len(sorted(set(tones_chord) & set(WN))) < len(sorted(set(tones_chord) & set(BN))):
154
+ ctype = 'MIXED BLACK'
155
+
156
+ if return_chord_type_index:
157
+ return CHORDS_TYPES.index(ctype)
158
+
159
+ else:
160
+ return ctype
161
+
162
+ ###################################################################################
163
+
164
+ def tone_type(tone,
165
+ return_tone_type_index=True
166
+ ):
167
+
168
+ """
169
+ Returns tone type
170
+ """
171
+
172
+ tone = tone % 12
173
+
174
+ if tone in BLACK_NOTES:
175
+ if return_tone_type_index:
176
+ return CHORDS_TYPES.index('BLACK')
177
+ else:
178
+ return "BLACK"
179
+
180
+ else:
181
+ if return_tone_type_index:
182
+ return CHORDS_TYPES.index('WHITE')
183
+ else:
184
+ return "WHITE"
185
+
186
+ ###################################################################################
187
+
188
+ def find_closest_points(points, return_points=True):
189
+
190
+ """
191
+ Find closest 2D points
192
+ """
193
+
194
+ coords = np.array(points)
195
+
196
+ num_points = coords.shape[0]
197
+ closest_matches = np.zeros(num_points, dtype=int)
198
+ distances = np.zeros((num_points, num_points))
199
+
200
+ for i in range(num_points):
201
+ for j in range(num_points):
202
+ if i != j:
203
+ distances[i, j] = np.linalg.norm(coords[i] - coords[j])
204
+ else:
205
+ distances[i, j] = np.inf
206
+
207
+ closest_matches = np.argmin(distances, axis=1)
208
+
209
+ if return_points:
210
+ points_matches = coords[closest_matches].tolist()
211
+ return points_matches
212
+
213
+ else:
214
+ return closest_matches.tolist()
215
+
216
+ ################################################################################
217
+
218
+ def reduce_dimensionality_tsne(list_of_values,
219
+ n_comp=2,
220
+ n_iter=5000,
221
+ verbose=True
222
+ ):
223
+
224
+ """
225
+ Reduces the dimensionality of the values using t-SNE.
226
+ """
227
+
228
+ vals = np.array(list_of_values)
229
+
230
+ tsne = TSNE(n_components=n_comp,
231
+ n_iter=n_iter,
232
+ verbose=verbose)
233
+
234
+ reduced_vals = tsne.fit_transform(vals)
235
+
236
+ return reduced_vals.tolist()
237
+
238
+ ################################################################################
239
+
240
+ def compute_mst_edges(similarity_scores_list):
241
+
242
+ """
243
+ Computes the Minimum Spanning Tree (MST) edges based on the similarity scores.
244
+ """
245
+
246
+ num_tokens = len(similarity_scores_list[0])
247
+
248
+ graph = nx.Graph()
249
+
250
+ for i in range(num_tokens):
251
+ for j in range(i + 1, num_tokens):
252
+ weight = 1 - similarity_scores_list[i][j]
253
+ graph.add_edge(i, j, weight=weight)
254
+
255
+ mst = nx.minimum_spanning_tree(graph)
256
+
257
+ mst_edges = list(mst.edges(data=False))
258
+
259
+ return mst_edges
260
+
261
+ ################################################################################
262
+
263
+ def square_binary_matrix(binary_matrix,
264
+ matrix_size=128,
265
+ interpolation_order=5,
266
+ return_square_matrix_points=False
267
+ ):
268
+
269
+ """
270
+ Reduces an arbitrary binary matrix to a square binary matrix
271
+ """
272
+
273
+ zoom_factors = (matrix_size / len(binary_matrix), 1)
274
+
275
+ resized_matrix = zoom(binary_matrix, zoom_factors, order=interpolation_order)
276
+
277
+ resized_matrix = (resized_matrix > 0.5).astype(int)
278
+
279
+ final_matrix = np.zeros((matrix_size, matrix_size), dtype=int)
280
+ final_matrix[:, :resized_matrix.shape[1]] = resized_matrix
281
+
282
+ points = np.column_stack(np.where(final_matrix == 1)).tolist()
283
+
284
+ if return_square_matrix_points:
285
+ return points
286
+
287
+ else:
288
+ return resized_matrix
289
+
290
+ ################################################################################
291
+
292
+ def square_matrix_points_colors(square_matrix_points):
293
+
294
+ """
295
+ Returns colors for square matrix points
296
+ """
297
+
298
+ cmap = generate_colors(12)
299
+
300
+ chords = []
301
+ chords_dict = set()
302
+ counts = []
303
+
304
+ for k, v in groupby(square_matrix_points, key=lambda x: x[0]):
305
+ pgroup = [vv[1] for vv in v]
306
+ chord = sorted(set(pgroup))
307
+ tchord = sorted(set([p % 12 for p in chord]))
308
+ chords_dict.add(tuple(tchord))
309
+ chords.append(tuple(tchord))
310
+ counts.append(len(pgroup))
311
+
312
+ chords_dict = sorted(chords_dict)
313
+
314
+ colors = []
315
+
316
+ for i, c in enumerate(chords):
317
+ colors.extend([cmap[round(sum(c) / len(c))]] * counts[i])
318
+
319
+ return colors
320
+
321
+ ################################################################################
322
+
323
+ def hsv_to_rgb(h, s, v):
324
+
325
+ if s == 0.0:
326
+ return v, v, v
327
+
328
+ i = int(h*6.0)
329
+ f = (h*6.0) - i
330
+ p = v*(1.0 - s)
331
+ q = v*(1.0 - s*f)
332
+ t = v*(1.0 - s*(1.0-f))
333
+ i = i%6
334
+
335
+ return [(v, t, p), (q, v, p), (p, v, t), (p, q, v), (t, p, v), (v, p, q)][i]
336
+
337
+ ################################################################################
338
+
339
+ def generate_colors(n):
340
+ return [hsv_to_rgb(i/n, 1, 1) for i in range(n)]
341
+
342
+ ################################################################################
343
+
344
+ def add_arrays(a, b):
345
+ return [sum(pair) for pair in zip(a, b)]
346
+
347
+ ################################################################################
348
+
349
+ def calculate_similarities(lists_of_values, metric='cosine'):
350
+ return metrics.pairwise_distances(lists_of_values, metric=metric).tolist()
351
+
352
+ ################################################################################
353
+
354
+ def get_tokens_embeddings(x_transformer_model):
355
+ return x_transformer_model.net.token_emb.emb.weight.detach().cpu().tolist()
356
+
357
+ ################################################################################
358
+
359
+ def minkowski_distance_matrix(X, p=3):
360
+
361
+ X = np.array(X)
362
+
363
+ n = X.shape[0]
364
+ dist_matrix = np.zeros((n, n))
365
+
366
+ for i in range(n):
367
+ for j in range(n):
368
+ dist_matrix[i, j] = np.sum(np.abs(X[i] - X[j])**p)**(1/p)
369
+
370
+ return dist_matrix.tolist()
371
+
372
+ ################################################################################
373
+
374
+ def robust_normalize(values):
375
+
376
+ values = np.array(values)
377
+ q1 = np.percentile(values, 25)
378
+ q3 = np.percentile(values, 75)
379
+ iqr = q3 - q1
380
+
381
+ filtered_values = values[(values >= q1 - 1.5 * iqr) & (values <= q3 + 1.5 * iqr)]
382
+
383
+ min_val = np.min(filtered_values)
384
+ max_val = np.max(filtered_values)
385
+ normalized_values = (values - min_val) / (max_val - min_val)
386
+
387
+ normalized_values = np.clip(normalized_values, 0, 1)
388
+
389
+ return normalized_values.tolist()
390
+
391
+ ################################################################################
392
+
393
+ def min_max_normalize(values):
394
+
395
+ scaler = MinMaxScaler()
396
+
397
+ return scaler.fit_transform(values).tolist()
398
+
399
+ ################################################################################
400
+
401
+ def remove_points_outliers(points, z_score_threshold=3):
402
+
403
+ points = np.array(points)
404
+
405
+ z_scores = np.abs(zscore(points, axis=0))
406
+
407
+ return points[(z_scores < z_score_threshold).all(axis=1)].tolist()
408
+
409
+ ################################################################################
410
+
411
+ def generate_labels(lists_of_values,
412
+ return_indices_labels=False
413
+ ):
414
+
415
+ ordered_indices = list(range(len(lists_of_values)))
416
+ ordered_indices_labels = [str(i) for i in ordered_indices]
417
+ ordered_values_labels = [str(lists_of_values[i]) for i in ordered_indices]
418
+
419
+ if return_indices_labels:
420
+ return ordered_indices_labels
421
+
422
+ else:
423
+ return ordered_values_labels
424
+
425
+ ################################################################################
426
+
427
+ def reduce_dimensionality_pca(list_of_values, n_components=2):
428
+
429
+ """
430
+ Reduces the dimensionality of the values using PCA.
431
+ """
432
+
433
+ pca = PCA(n_components=n_components)
434
+ pca_data = pca.fit_transform(list_of_values)
435
+
436
+ return pca_data.tolist()
437
+
438
+ def reduce_dimensionality_simple(list_of_values,
439
+ return_means=True,
440
+ return_std_devs=True,
441
+ return_medians=False,
442
+ return_vars=False
443
+ ):
444
+
445
+ '''
446
+ Reduces dimensionality of the values in a simple way
447
+ '''
448
+
449
+ array = np.array(list_of_values)
450
+ results = []
451
+
452
+ if return_means:
453
+ means = np.mean(array, axis=1)
454
+ results.append(means)
455
+
456
+ if return_std_devs:
457
+ std_devs = np.std(array, axis=1)
458
+ results.append(std_devs)
459
+
460
+ if return_medians:
461
+ medians = np.median(array, axis=1)
462
+ results.append(medians)
463
+
464
+ if return_vars:
465
+ vars = np.var(array, axis=1)
466
+ results.append(vars)
467
+
468
+ merged_results = np.column_stack(results)
469
+
470
+ return merged_results.tolist()
471
+
472
+ ################################################################################
473
+
474
+ def reduce_dimensionality_2d_distance(list_of_values, p=5):
475
+
476
+ '''
477
+ Reduces the dimensionality of the values using 2d distance
478
+ '''
479
+
480
+ values = np.array(list_of_values)
481
+
482
+ dist_matrix = distance_matrix(values, values, p=p)
483
+
484
+ mst = minimum_spanning_tree(dist_matrix).toarray()
485
+
486
+ points = []
487
+
488
+ for i in range(len(values)):
489
+ for j in range(len(values)):
490
+ if mst[i, j] > 0:
491
+ points.append([i, j])
492
+
493
+ return points
494
+
495
+ ################################################################################
496
+
497
+ def normalize_to_range(values, n):
498
+
499
+ min_val = min(values)
500
+ max_val = max(values)
501
+
502
+ range_val = max_val - min_val
503
+
504
+ normalized_values = [((value - min_val) / range_val * 2 * n) - n for value in values]
505
+
506
+ return normalized_values
507
+
508
+ ################################################################################
509
+
510
+ def reduce_dimensionality_simple_pca(list_of_values, n_components=2):
511
+
512
+ '''
513
+ Reduces the dimensionality of the values using simple PCA
514
+ '''
515
+
516
+ reduced_values = []
517
+
518
+ for l in list_of_values:
519
+
520
+ norm_values = [round(v * len(l)) for v in normalize_to_range(l, (n_components+1) // 2)]
521
+
522
+ pca_values = Counter(norm_values).most_common()
523
+ pca_values = [vv[0] / len(l) for vv in pca_values]
524
+ pca_values = pca_values[:n_components]
525
+ pca_values = pca_values + [0] * (n_components - len(pca_values))
526
+
527
+ reduced_values.append(pca_values)
528
+
529
+ return reduced_values
530
+
531
+ ################################################################################
532
+
533
+ def filter_and_replace_values(list_of_values,
534
+ threshold,
535
+ replace_value,
536
+ replace_above_threshold=False
537
+ ):
538
+
539
+ array = np.array(list_of_values)
540
+
541
+ modified_array = np.copy(array)
542
+
543
+ if replace_above_threshold:
544
+ modified_array[modified_array > threshold] = replace_value
545
+
546
+ else:
547
+ modified_array[modified_array < threshold] = replace_value
548
+
549
+ return modified_array.tolist()
550
+
551
+ ################################################################################
552
+
553
+ def find_shortest_constellation_path(points,
554
+ start_point_idx,
555
+ end_point_idx,
556
+ p=5,
557
+ return_path_length=False,
558
+ return_path_points=False,
559
+ ):
560
+
561
+ """
562
+ Finds the shortest path between two points of the points constellation
563
+ """
564
+
565
+ points = np.array(points)
566
+
567
+ dist_matrix = distance_matrix(points, points, p=p)
568
+
569
+ mst = minimum_spanning_tree(dist_matrix).toarray()
570
+
571
+ G = nx.Graph()
572
+
573
+ for i in range(len(points)):
574
+ for j in range(len(points)):
575
+ if mst[i, j] > 0:
576
+ G.add_edge(i, j, weight=mst[i, j])
577
+
578
+ path = nx.shortest_path(G,
579
+ source=start_point_idx,
580
+ target=end_point_idx,
581
+ weight='weight'
582
+ )
583
+
584
+ path_length = nx.shortest_path_length(G,
585
+ source=start_point_idx,
586
+ target=end_point_idx,
587
+ weight='weight')
588
+
589
+ path_points = points[np.array(path)].tolist()
590
+
591
+
592
+ if return_path_points:
593
+ return path_points
594
+
595
+ if return_path_length:
596
+ return path_length
597
+
598
+ return path
599
+
600
+ ################################################################################
601
+ # Core functions
602
+ ################################################################################
603
+
604
+ def plot_ms_SONG(ms_song,
605
+ preview_length_in_notes=0,
606
+ block_lines_times_list = None,
607
+ plot_title='ms Song',
608
+ max_num_colors=129,
609
+ drums_color_num=128,
610
+ plot_size=(11,4),
611
+ note_height = 0.75,
612
+ show_grid_lines=False,
613
+ return_plt = False,
614
+ timings_multiplier=1,
615
+ save_plt='',
616
+ save_only_plt_image=True,
617
+ save_transparent=False
618
+ ):
619
+
620
+ '''ms SONG plot'''
621
+
622
+ notes = [s for s in ms_song if s[0] == 'note']
623
+
624
+ if (len(max(notes, key=len)) != 7) and (len(min(notes, key=len)) != 7):
625
+ print('The song notes do not have patch information')
626
+ print('Please add patches to the notes in the song')
627
+
628
+ else:
629
+
630
+ start_times = [(s[1] * timings_multiplier) / 1000 for s in notes]
631
+ durations = [(s[2] * timings_multiplier) / 1000 for s in notes]
632
+ pitches = [s[4] for s in notes]
633
+ patches = [s[6] for s in notes]
634
+
635
+ colors = generate_colors(max_num_colors)
636
+ colors[drums_color_num] = (1, 1, 1)
637
+
638
+ pbl = (notes[preview_length_in_notes][1] * timings_multiplier) / 1000
639
+
640
+ fig, ax = plt.subplots(figsize=plot_size)
641
+
642
+ for start, duration, pitch, patch in zip(start_times, durations, pitches, patches):
643
+ rect = plt.Rectangle((start, pitch), duration, note_height, facecolor=colors[patch])
644
+ ax.add_patch(rect)
645
+
646
+ ax.set_xlim([min(start_times), max(add_arrays(start_times, durations))])
647
+ ax.set_ylim([min(pitches)-1, max(pitches)+1])
648
+
649
+ ax.set_facecolor('black')
650
+ fig.patch.set_facecolor('white')
651
+
652
+ if preview_length_in_notes > 0:
653
+ ax.axvline(x=pbl, c='white')
654
+
655
+ if block_lines_times_list:
656
+ for bl in block_lines_times_list:
657
+ ax.axvline(x=bl, c='white')
658
+
659
+ if show_grid_lines:
660
+ ax.grid(color='white')
661
+
662
+ plt.xlabel('Time (s)', c='black')
663
+ plt.ylabel('MIDI Pitch', c='black')
664
+
665
+ plt.title(plot_title)
666
+
667
+ if save_plt != '':
668
+ if save_only_plt_image:
669
+ plt.axis('off')
670
+ plt.title('')
671
+ plt.savefig(save_plt,
672
+ transparent=save_transparent,
673
+ bbox_inches='tight',
674
+ pad_inches=0,
675
+ facecolor='black'
676
+ )
677
+ plt.close()
678
+
679
+ else:
680
+ plt.savefig(save_plt)
681
+ plt.close()
682
+
683
+ if return_plt:
684
+ return fig
685
+
686
+ plt.show()
687
+ plt.close()
688
+
689
+ ################################################################################
690
+
691
+ def plot_square_matrix_points(list_of_points,
692
+ list_of_points_colors,
693
+ plot_size=(7, 7),
694
+ point_size = 10,
695
+ show_grid_lines=False,
696
+ plot_title = 'Square Matrix Points Plot',
697
+ return_plt=False,
698
+ save_plt='',
699
+ save_only_plt_image=True,
700
+ save_transparent=False
701
+ ):
702
+
703
+ '''Square matrix points plot'''
704
+
705
+ fig, ax = plt.subplots(figsize=plot_size)
706
+
707
+ ax.set_facecolor('black')
708
+
709
+ if show_grid_lines:
710
+ ax.grid(color='white')
711
+
712
+ plt.xlabel('Time Step', c='black')
713
+ plt.ylabel('MIDI Pitch', c='black')
714
+
715
+ plt.title(plot_title)
716
+
717
+ plt.scatter([p[0] for p in list_of_points],
718
+ [p[1] for p in list_of_points],
719
+ c=list_of_points_colors,
720
+ s=point_size
721
+ )
722
+
723
+ if save_plt != '':
724
+ if save_only_plt_image:
725
+ plt.axis('off')
726
+ plt.title('')
727
+ plt.savefig(save_plt,
728
+ transparent=save_transparent,
729
+ bbox_inches='tight',
730
+ pad_inches=0,
731
+ facecolor='black'
732
+ )
733
+ plt.close()
734
+
735
+ else:
736
+ plt.savefig(save_plt)
737
+ plt.close()
738
+
739
+ if return_plt:
740
+ return fig
741
+
742
+ plt.show()
743
+ plt.close()
744
+
745
+ ################################################################################
746
+
747
+ def plot_cosine_similarities(lists_of_values,
748
+ plot_size=(7, 7),
749
+ save_plot=''
750
+ ):
751
+
752
+ """
753
+ Cosine similarities plot
754
+ """
755
+
756
+ cos_sim = metrics.pairwise_distances(lists_of_values, metric='cosine')
757
+
758
+ plt.figure(figsize=plot_size)
759
+
760
+ plt.imshow(cos_sim, cmap="inferno", interpolation="nearest")
761
+
762
+ im_ratio = cos_sim.shape[0] / cos_sim.shape[1]
763
+
764
+ plt.colorbar(fraction=0.046 * im_ratio, pad=0.04)
765
+
766
+ plt.xlabel("Index")
767
+ plt.ylabel("Index")
768
+
769
+ plt.tight_layout()
770
+
771
+ if save_plot != '':
772
+ plt.savefig(save_plot, bbox_inches="tight")
773
+ plt.close()
774
+
775
+ plt.show()
776
+ plt.close()
777
+
778
+ ################################################################################
779
+
780
+ def plot_points_with_mst_lines(points,
781
+ points_labels,
782
+ points_mst_edges,
783
+ plot_size=(20, 20),
784
+ labels_size=24,
785
+ save_plot=''
786
+ ):
787
+
788
+ """
789
+ Plots 2D points with labels and MST lines.
790
+ """
791
+
792
+ plt.figure(figsize=plot_size)
793
+
794
+ for i, label in enumerate(points_labels):
795
+ plt.scatter(points[i][0], points[i][1])
796
+ plt.annotate(label, (points[i][0], points[i][1]), fontsize=labels_size)
797
+
798
+ for edge in points_mst_edges:
799
+ i, j = edge
800
+ plt.plot([points[i][0], points[j][0]], [points[i][1], points[j][1]], 'k-', alpha=0.5)
801
+
802
+ plt.title('Points Map with MST Lines', fontsize=labels_size)
803
+ plt.xlabel('X-axis', fontsize=labels_size)
804
+ plt.ylabel('Y-axis', fontsize=labels_size)
805
+
806
+ if save_plot != '':
807
+ plt.savefig(save_plot, bbox_inches="tight")
808
+ plt.close()
809
+
810
+ plt.show()
811
+
812
+ plt.close()
813
+
814
+ ################################################################################
815
+
816
+ def plot_points_constellation(points,
817
+ points_labels,
818
+ p=5,
819
+ plot_size=(15, 15),
820
+ labels_size=12,
821
+ show_grid=False,
822
+ save_plot=''
823
+ ):
824
+
825
+ """
826
+ Plots 2D points constellation
827
+ """
828
+
829
+ points = np.array(points)
830
+
831
+ dist_matrix = distance_matrix(points, points, p=p)
832
+
833
+ mst = minimum_spanning_tree(dist_matrix).toarray()
834
+
835
+ plt.figure(figsize=plot_size)
836
+
837
+ plt.scatter(points[:, 0], points[:, 1], color='blue')
838
+
839
+ for i, label in enumerate(points_labels):
840
+ plt.annotate(label, (points[i, 0], points[i, 1]),
841
+ textcoords="offset points",
842
+ xytext=(0, 10),
843
+ ha='center',
844
+ fontsize=labels_size
845
+ )
846
+
847
+ for i in range(len(points)):
848
+ for j in range(len(points)):
849
+ if mst[i, j] > 0:
850
+ plt.plot([points[i, 0], points[j, 0]], [points[i, 1], points[j, 1]], 'k--')
851
+
852
+ plt.xlabel('X-axis', fontsize=labels_size)
853
+ plt.ylabel('Y-axis', fontsize=labels_size)
854
+ plt.title('2D Coordinates with Minimum Spanning Tree', fontsize=labels_size)
855
+
856
+ plt.grid(show_grid)
857
+
858
+ if save_plot != '':
859
+ plt.savefig(save_plot, bbox_inches="tight")
860
+ plt.close()
861
+
862
+ plt.show()
863
+
864
+ plt.close()
865
+
866
+ ################################################################################
867
+
868
+ def binary_matrix_to_images(matrix,
869
+ step,
870
+ overlap,
871
+ output_folder='./Dataset/',
872
+ output_img_prefix='image',
873
+ output_img_ext='.png',
874
+ save_to_array=False,
875
+ verbose=True
876
+ ):
877
+
878
+ if not save_to_array:
879
+
880
+ if verbose:
881
+ print('=' * 70)
882
+ print('Checking output folder dir...')
883
+
884
+ os.makedirs(os.path.dirname(output_folder), exist_ok=True)
885
+
886
+ if verbose:
887
+ print('Done!')
888
+
889
+ if verbose:
890
+ print('=' * 70)
891
+ print('Writing images...')
892
+
893
+ matrix = np.array(matrix, dtype=np.uint8)
894
+
895
+ image_array = []
896
+
897
+ for i in range(0, max(1, matrix.shape[0]-max(step, overlap)), overlap):
898
+
899
+ submatrix = matrix[i:i+step, :]
900
+
901
+ img = Image.fromarray(submatrix * 255).convert('1')
902
+
903
+ if save_to_array:
904
+ image_array.append(np.array(img))
905
+
906
+ else:
907
+ img.save(output_folder + output_img_prefix + '_' + str(matrix.shape[1]) + '_' + str(i).zfill(7) + output_img_ext)
908
+
909
+ if verbose:
910
+ print('Done!')
911
+ print('=' * 70)
912
+ print('Saved', (matrix.shape[0]-max(step, overlap)) // min(step, overlap)+1, 'images!')
913
+ print('=' * 70)
914
+
915
+ if save_to_array:
916
+ return np.array(image_array).tolist()
917
+
918
+ ################################################################################
919
+
920
+ def images_to_binary_matrix(list_of_images):
921
+
922
+ image_array = np.array(list_of_images)
923
+
924
+ original_matrix = []
925
+
926
+ for img in image_array:
927
+
928
+ submatrix = np.array(img)
929
+ original_matrix.extend(submatrix.tolist())
930
+
931
+ return original_matrix
932
+
933
+ ################################################################################
934
+ # [WIP] Future dev functions
935
+ ################################################################################
936
+
937
+ '''
938
+ import umap
939
+
940
+ def reduce_dimensionality_umap(list_of_values,
941
+ n_comp=2,
942
+ n_neighbors=15,
943
+ ):
944
+
945
+ """
946
+ Reduces the dimensionality of the values using UMAP.
947
+ """
948
+
949
+ vals = np.array(list_of_values)
950
+
951
+ umap_reducer = umap.UMAP(n_components=n_comp,
952
+ n_neighbors=n_neighbors,
953
+ n_epochs=5000,
954
+ verbose=True
955
+ )
956
+
957
+ reduced_vals = umap_reducer.fit_transform(vals)
958
+
959
+ return reduced_vals.tolist()
960
+ '''
961
+
962
+ ################################################################################
963
+
964
+ '''
965
+ import alphashape
966
+ from shapely.geometry import Point
967
+ from matplotlib.tri import Triangulation, LinearTriInterpolator
968
+ from scipy.stats import zscore
969
+
970
+ #===============================================================================
971
+
972
+ coordinates = points
973
+
974
+ dist_matrix = minkowski_distance_matrix(coordinates, p=3) # You can change the value of p as needed
975
+
976
+ # Centering matrix
977
+ n = dist_matrix.shape[0]
978
+ H = np.eye(n) - np.ones((n, n)) / n
979
+
980
+ # Apply double centering
981
+ B = -0.5 * H @ dist_matrix**2 @ H
982
+
983
+ # Eigen decomposition
984
+ eigvals, eigvecs = np.linalg.eigh(B)
985
+
986
+ # Sort eigenvalues and eigenvectors
987
+ idx = np.argsort(eigvals)[::-1]
988
+ eigvals = eigvals[idx]
989
+ eigvecs = eigvecs[:, idx]
990
+
991
+ # Select the top 2 eigenvectors
992
+ X_transformed = eigvecs[:, :2] * np.sqrt(eigvals[:2])
993
+
994
+ #===============================================================================
995
+
996
+ src_points = X_transformed
997
+ src_values = np.array([[p[1]] for p in points]) #np.random.rand(X_transformed.shape[0])
998
+
999
+ #===============================================================================
1000
+
1001
+ # Normalize the points to the range [0, 1]
1002
+ scaler = MinMaxScaler()
1003
+ points_normalized = scaler.fit_transform(src_points)
1004
+
1005
+ values_normalized = custom_normalize(src_values)
1006
+
1007
+ # Remove outliers based on z-score
1008
+ z_scores = np.abs(zscore(points_normalized, axis=0))
1009
+ filtered_points = points_normalized[(z_scores < 3).all(axis=1)]
1010
+ filtered_values = values_normalized[(z_scores < 3).all(axis=1)]
1011
+
1012
+ # Compute the concave hull (alpha shape)
1013
+ alpha = 8 # Adjust alpha as needed
1014
+ hull = alphashape.alphashape(filtered_points, alpha)
1015
+
1016
+ # Create a triangulation
1017
+ tri = Triangulation(filtered_points[:, 0], filtered_points[:, 1])
1018
+
1019
+ # Interpolate the values on the triangulation
1020
+ interpolator = LinearTriInterpolator(tri, filtered_values[:, 0])
1021
+ xi, yi = np.meshgrid(np.linspace(0, 1, 100), np.linspace(0, 1, 100))
1022
+ zi = interpolator(xi, yi)
1023
+
1024
+ # Mask out points outside the concave hull
1025
+ mask = np.array([hull.contains(Point(x, y)) for x, y in zip(xi.flatten(), yi.flatten())])
1026
+ zi = np.ma.array(zi, mask=~mask.reshape(zi.shape))
1027
+
1028
+ # Plot the filled contour based on the interpolated values
1029
+ plt.contourf(xi, yi, zi, levels=50, cmap='viridis')
1030
+
1031
+ # Plot the original points
1032
+ #plt.scatter(filtered_points[:, 0], filtered_points[:, 1], c=filtered_values, edgecolors='k')
1033
+
1034
+ plt.title('Filled Contour Plot with Original Values')
1035
+ plt.xlabel('X-axis')
1036
+ plt.ylabel('Y-axis')
1037
+ plt.colorbar(label='Value')
1038
+ plt.show()
1039
+ '''
1040
+
1041
+ ################################################################################
1042
+ #
1043
+ # This is the end of the TPLOTS Python module
1044
+ #
1045
+ ################################################################################
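For completeness, a minimal usage sketch of the new TPLOTS module, assuming it is importable as TPLOTS and using random vectors as a hypothetical stand-in for real token embeddings or score features:

import numpy as np
import TPLOTS

# Hypothetical stand-in data: 32 random 16-dimensional vectors
data = np.random.rand(32, 16).tolist()

# Pairwise cosine-distance heatmap
TPLOTS.plot_cosine_similarities(data)

# Reduce to 2D with PCA, label points by index, and plot the constellation with MST edges
points = TPLOTS.reduce_dimensionality_pca(data, n_components=2)
labels = TPLOTS.generate_labels(points, return_indices_labels=True)
TPLOTS.plot_points_constellation(points, labels, p=5)

All three calls use functions defined above with their documented defaults; only the input data and the module import path are assumptions.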