@@ -2,6 +2,12 @@

 import numpy as np
 from sdf import Group, Dataset
+import scipy.io
+
+# extract strings from the matrix
+strMatNormal = lambda a: [''.join(s).rstrip() for s in a]
+strMatTrans = lambda a: [''.join(s).rstrip() for s in zip(*a)]
+


 def _split_description(comment):

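The two helper lambdas added above decode the character matrices that scipy.io.loadmat returns when chars_as_strings=False: each MATLAB string comes back as a row (or, in transposed files, a column) of single characters. A minimal sketch of the expected behavior, using a made-up 2x3 char matrix:

    import numpy as np

    a = np.array([list('abc'), list('de ')])
    strMatNormal = lambda a: [''.join(s).rstrip() for s in a]       # one string per row
    strMatTrans = lambda a: [''.join(s).rstrip() for s in zip(*a)]  # one string per column

    strMatNormal(a)  # -> ['abc', 'de']     (trailing padding stripped)
    strMatTrans(a)   # -> ['ad', 'be', 'c']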
@@ -29,68 +35,86 @@ def _split_description(comment):
     return unit, display_unit, comment, info


-def load(filename, objectname, unit=None, scale_units=None):
+def load(filename, objectname):
+
+    g_root = _load_mat(filename)

     if objectname == '/':
-        return load_all(filename)
+        return g_root
     else:
-        return load_dataset(filename, objectname, unit, scale_units)
+        obj = g_root
+        segments = objectname.split('/')
+        for s in segments:
+            if s:
+                obj = obj[s]
+        return obj


-def load_dataset(filename, path, unit=None, scale_units=None):
+def _load_mat(filename):

-    import DyMat
+    mat = scipy.io.loadmat(filename, chars_as_strings=False)

-    df = DyMat.DyMatFile(filename)
+    _vars = {}
+    _blocks = []

-    # remove the leading slash
-    if path.startswith('/'):
-        path = path[1:]
+    try:
+        fileInfo = strMatNormal(mat['Aclass'])
+    except KeyError:
+        raise Exception('File structure not supported!')

-    # change to the path dot notation
-    path = path.replace('/', '.')
+    if fileInfo[1] == '1.1':
+        if fileInfo[3] == 'binTrans':
+            # usually files from OpenModelica or Dymola auto saved,
+            # all methods rely on this structure since this was the only
+            # one understood by earlier versions
+            names = strMatTrans(mat['name'])  # names
+            descr = strMatTrans(mat['description'])  # descriptions

-    # get the variable name
-    name = path.split('.')[-1]
+            cons = mat['data_1']
+            traj = mat['data_2']

-    unit, display_unit, comment, info = _split_description(df.description(path))
+            d = mat['dataInfo'][0, :]
+            x = mat['dataInfo'][1, :]

-    data = df.data(path)
+        elif fileInfo[3] == 'binNormal':
+            # usually files from Dymola, save as...,
+            # variables are mapped to the structure above ('binTrans')
+            names = strMatNormal(mat['name'])  # names
+            descr = strMatNormal(mat['description'])  # descriptions

-    if 'type' in info:
-        if info['type'] == 'Integer' or 'Boolean':
-            data = np.asarray(data, dtype=np.int32)
+            cons = mat['data_1'].T
+            traj = mat['data_2'].T

-    if data.size == 2:
-        ds = Dataset(name, comment=comment, unit=unit, display_unit=display_unit, data=data[0])
+            d = mat['dataInfo'][:, 0]
+            x = mat['dataInfo'][:, 1]
+        else:
+            raise Exception('File structure not supported!')
+
+        c = np.abs(x) - 1  # column
+        s = np.sign(x)  # sign
+
+        vars = zip(names, descr, d, c, s)
+    elif fileInfo[1] == '1.0':
+        # files generated with Dymola, save as..., only plotted ...
+        # fake the structure of a 1.1 transposed file
+        names = strMatNormal(mat['names'])  # names
+        _blocks.append(0)
+        mat['data_0'] = mat['data'].transpose()
+        del mat['data']
+        _absc = (names[0], '')
+        for i in range(1, len(names)):
+            _vars[names[i]] = ('', 0, i, 1)
     else:
-        a_data, a_name, a_description = df.abscissa(2)
-        a_unit, _, a_comment, a_info = _split_description(a_description)
-
-        ds_time = Dataset(a_name, data=a_data, unit=a_unit, comment='Simulation time')
-
-        ds = Dataset(name, comment=comment, unit=unit, display_unit=display_unit, data=data, scales=[ds_time])
-
-    return ds
-
-
-def load_all(filename):
-
-    import DyMat
+        raise Exception('File structure not supported!')

+    # build the SDF tree
     g_root = Group('/')

-    df = DyMat.DyMatFile(filename)
+    ds_time = None

-    data, name, description = df.abscissa(2)
-    unit, display_unit, comment, info = _split_description(description)
+    for name, desc, d, c, s in vars:

-    ds_time = Dataset(name, data=data, unit=unit, comment='Simulation time')
-    g_root.datasets.append(ds_time)
-
-    for name in df.names():
-
-        unit, display_unit, comment, info = _split_description(df.description(name))
+        unit, display_unit, comment, info = _split_description(desc)

         path = name.split('.')

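For reference while reading the loop continued below: in the 1.1 format each dataInfo record describes one variable. The code above takes d (which matrix holds the values: 0 for the abscissa, 1 for constants in data_1, anything else for trajectories in data_2) and the signed one-based column index x, from which c = abs(x) - 1 is the zero-based index and s = sign(x) flags variables stored with negated values. A hypothetical record, values chosen for illustration only:

    import numpy as np

    d, x = 2, -4       # trajectory variable, stored negated, in column 4
    c = np.abs(x) - 1  # -> 3, zero-based row index into traj
    s = np.sign(x)     # -> -1, the sign the stored values would be multiplied by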
@@ -105,14 +129,20 @@ def load_all(filename):
             g_parent = g_child
             pass

-        data = df.data(name)
+        if d == 1:
+            data = cons[c, 0]
+        else:
+            data = traj[c, :]

         if 'type' in info:
             if info['type'] in ['Integer', 'Boolean']:
                 data = np.asarray(data, dtype=np.int32)

-        if data.size == 2:
-            ds = Dataset(path[-1], comment=comment, unit=unit, display_unit=display_unit, data=data[0])
+        if d == 0:
+            ds = Dataset(path[-1], comment="Simulation time", unit=unit, display_unit=display_unit, data=data)
+            ds_time = ds
+        elif d == 1:
+            ds = Dataset(path[-1], comment=comment, unit=unit, display_unit=display_unit, data=data)
         else:
             ds = Dataset(path[-1], comment=comment, unit=unit, display_unit=display_unit, data=data, scales=[ds_time])

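A minimal usage sketch of the reworked load entry point, assuming this module is importable and given a hypothetical result file 'result.mat' that contains a variable body.mass:

    g_root = load('result.mat', '/')       # whole file as an SDF Group tree
    print([ds.name for ds in g_root.datasets])

    ds = load('result.mat', '/body/mass')  # a single Dataset addressed by path
    print(ds.data, ds.unit, ds.comment)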