cosimulation / fmigo · Commits

Commit 37a3d6a9, authored Mar 29, 2018 by Claude Lacoursiere

    updated pygo

Parent: 33748b16
Pipeline #1074 passed with stages in 22 minutes and 21 seconds
Changes: 5   Pipelines: 1
Showing 5 changed files with 202 additions and 157 deletions (+202 −157)
tools/pygo/examples/refchain.py       +94  −19
tools/pygo/examples/wrappedconv.py    +36  −56
tools/pygo/examples/wrappedmodels.py  +14  −9
tools/pygo/examples/wrappedplot.py    +51  −71
tools/pygo/lib/h5load.py              +7   −2
tools/pygo/examples/refchain.py  (view file @ 37a3d6a9)
@@ -8,6 +8,7 @@ from scipy import sparse
from scipy.linalg import expm
import hashlib
import pickle
import timeit

def join_struct_arrays2(arrays):
    newdtype = sum((a.dtype.descr for a in arrays), [])
@@ -31,7 +32,8 @@ def pack_data(t, x, v):
    for i in range(0, v.shape[1]):
        newrecarray["v%d" % (i + 1)] = v[:, i]
    return newrecarray

-def pickle_data(t, x, v, config):
+def pickle_data(config, x, v, t):
    l = {"t": t, "x": x, "v": v, "config": config}
    h = hashlib.md5(str(config).encode("utf8")).hexdigest()
    fname = h + ".pickle"
@@ -40,7 +42,20 @@ def pickle_data(t, x, v, config):
    f.close()
    return fname

-def get_pickle_data(t, config):
+class ref_data():
+    def __init__(self, t, x, v):
+        self.d = t
+        self.x = x
+        self.v = v
+
+    def interp(t):
+        self.ts = t
+        self.xs = interp.interp1d(d["t"], d["x"], kind="cubic", axis=0)(t)
+        self.xv = interp.interp1d(d["t"], d["v"], kind="cubic", axis=0)(t)
+        return self
+
+def get_pickle_data(config, t=False):
    """ Either open a file with the data or run reference simulation.
    In all cases, the data is interpolated to the given time array.
    """
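The reworked get_pickle_data caches reference solutions in a pickle file whose name is the MD5 hash of the stringified config dictionary, so repeated runs with the same configuration reuse the stored result instead of re-integrating. A minimal, hypothetical sketch of that caching pattern (the helper name and compute callback are illustrative, not from the repository):

import hashlib
import os
import pickle

def cached_reference(config, compute):
    """Return cached data for `config`, computing and storing it on a miss."""
    # Key the cache on the MD5 digest of the stringified configuration,
    # as the diff does for its .pickle file names.
    fname = hashlib.md5(str(config).encode("utf8")).hexdigest() + ".pickle"
    if os.path.exists(fname):
        with open(fname, "rb") as f:
            return pickle.load(f)
    data = compute(config)          # run the (expensive) reference simulation
    with open(fname, "wb") as f:
        pickle.dump(data, f)
    return data

Note that str(config) must be deterministic for the hash to be stable: dictionary ordering and the repr of any ndarray values both matter here.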
@@ -57,9 +72,7 @@ def get_pickle_data(t, config):
    except:
        if float(config["k"]) == float(0):
            s = spring_chain(config["init"], config["masses"], config["isprings"])
            print("doing ref")
        else:
            print("doing split")
            s = split_string(config["step"], config["init"], config["masses"],
                             config["isprings"], k=config["k"])
@@ -69,8 +82,9 @@ def get_pickle_data(t, config):
        f = open(fname, "wb")
        pickle.dump(d, f)
        f.close()
-    return interp.interp1d(d["t"], d["x"], kind="cubic", axis=0)(t), interp.interp1d(d["t"], d["v"], kind="cubic", axis=0)(t), d
+    data = ref_data(d["t"], d["x"], d["v"])
+    if type(t) == np.ndarray:
+        return data.interp(t if type(t) == np.array else data.t)

"""
implementation of a spring damper chain with alternating spring damper
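Both the old return statement and the new ref_data.interp helper resample the reference trajectory onto an arbitrary time grid with cubic interpolation. A small self-contained sketch of that resampling step, assuming SciPy is available (the variable names are illustrative only):

import numpy as np
from scipy import interpolate as interp

# Reference solution sampled on its own (possibly irregular) time grid.
t_ref = np.linspace(0.0, 10.0, 101)
x_ref = np.sin(t_ref)

# Target grid, e.g. the communication points of a co-simulation run.
t_new = np.linspace(0.0, 10.0, 33)

# Cubic interpolation along the time axis, as in the diff's interp1d calls.
x_new = interp.interp1d(t_ref, x_ref, kind="cubic", axis=0)(t_new)
print(x_new.shape)   # (33,)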
@@ -225,6 +239,7 @@ class spring():
        self.data = self.collector(self, self.z0)
        #if True:
        if h == 0.0:
            self.r.set_solout(self.data)
        self.r.integrate(tend)
@@ -342,16 +357,80 @@ class spring_chain(spring):
self
.
z0
=
z0
self
.
t0
=
t0
super
().
doit
(
tend
,
t0
=
self
.
t0
)
super
().
doit
(
tend
,
t0
=
self
.
t0
,
h
=
h
)
def
__call__
(
self
,
t
,
z
):
return
self
.
A
*
z
def
make_config_uniform
(
N
,
step
=
float
(
0
),
k
=
float
(
3
),
tend
=
-
1
):
print
(
"Fucking step = %g and k = %g"
%
(
step
,
k
))
init
=
np
.
array
([[
1
,
0
,
0
,
0
]]
+
[[
0
,
0
,
0
,
0
]]
*
(
N
-
1
))
masses
=
np
.
array
([[
1
,
2
]]
*
N
)
/
N
isprings
=
np
.
array
([[
1
,
0
]]
*
N
)
mu
=
1.0
/
(
1
/
masses
[
0
][
0
]
+
1
/
masses
[
0
][
1
])
min_p
,
max_p
=
get_min_max_period
(
masses
,
isprings
)
if
k
>
0
and
step
>
0
:
c
=
mu
*
(
k
*
2
*
math
.
pi
/
step
)
**
2
d
=
2
*
0.7
*
math
.
sqrt
(
c
*
mu
)
csprings
=
[[
c
,
d
,
0
,
0
]]
*
N
csprings
[
0
]
=
[
0
,
0
,
0
,
0
]
else
:
csprings
=
[[
0
,
0
,
0
,
0
]]
*
N
config
=
{
"N"
:
int
(
N
),
"init"
:
init
,
"masses"
:
masses
,
"isprings"
:
isprings
,
"csprings"
:
csprings
,
"step"
:
step
,
"k"
:
float
(
k
),
"tend"
:
float
(
tend
),
"min_p"
:
float
(
min_p
),
"step"
:
min_p
/
20
,
"tend"
:
2
*
max_p
if
tend
==
-
1
else
tend
}
config
[
"step"
]
=
step
if
step
>
0
else
min_p
/
10.0
return
config
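In make_config_uniform the coupling springs are tuned from the chosen communication step: with the reduced mass mu of two coupled bodies, the stiffness is set so that k coupling oscillation periods fit into one step, c = mu·(2πk/step)², and the damping gives a damping ratio of about 0.7, d = 2·0.7·√(c·mu). A short numerical sketch of that calculation, with illustrative values only:

import math

m1, m2 = 0.1, 0.2              # the two coupled masses (illustrative)
mu = 1.0 / (1 / m1 + 1 / m2)   # reduced mass of the pair
k = 3.0                        # coupling periods per communication step
step = 0.01                    # communication step size

c = mu * (k * 2 * math.pi / step) ** 2   # coupling spring stiffness
d = 2 * 0.7 * math.sqrt(c * mu)          # damping for a ~0.7 damping ratio

print("c = %g, d = %g" % (c, d))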
def measure_perf(m, N=4, factor=2):
    n = []
    t = []
    S = []
    C = []
    tend = 10
    for i in range(1, m):
        c = make_config_uniform(N)
        c["tend"] = tend
        C += [c]
        n += [N]
        s = spring_chain(c["init"], c["masses"], c["isprings"])
        S += [s]
        u = timeit.Timer(lambda: s.doit(c["tend"], h=c["step"]))
        t += [u.timeit(number=1)]
        N *= factor
    t = np.array(t)
    n = np.array(n)
    try:
        sl = np.polyfit(np.log10(t), np.log10(n), 1)[1]
        py.clf()
        py.loglog(t, n)
        py.legend(["exponent = %g" % sl])
        py.title("Computational complexity")
        py.show()
    except:
        pass
    return t, n, S, C
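measure_perf grows the chain by a constant factor each iteration, times a full integration with timeit.Timer, and estimates the scaling exponent from a straight-line fit in log–log space. A minimal standalone sketch of that slope estimation; the timed workload here is a placeholder, not the project's spring_chain, and the fit order (log time against log size) is my choice for the example:

import timeit
import numpy as np

def work(n):
    """Placeholder workload that is roughly O(n)."""
    return sum(i * i for i in range(n))

sizes, times = [], []
n = 1000
for _ in range(5):
    timer = timeit.Timer(lambda: work(n))
    times.append(timer.timeit(number=1))
    sizes.append(n)
    n *= 2

# Slope of log(time) vs. log(size) approximates the complexity exponent;
# np.polyfit returns the highest-degree coefficient first.
slope = np.polyfit(np.log10(sizes), np.log10(times), 1)[0]
print("estimated exponent: %.2f" % slope)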
    def __call__(self, t, z):
        return self.A * z

if __name__ == "__main__":
    N = 10
    if not (('N' in vars() or 'N' in globals()) and type(N) == type(0)):
        N = 10
    init = np.array([[1, 0, 0, 0]] + [[0, 0, 0, 0]] * (N - 1))
    masses = np.array([[1, 2]] * N)
    isprings = np.array([[1, 0]] * N)
@@ -371,22 +450,18 @@ if __name__ == "__main__":
              "min_p": float(min_p),
              "step": min_p / 20}
    eps = 1e-16  # machine precision
    min_p, max_p = get_min_max_period(masses, isprings)
    tend = 2 * max_p
    NPP = 10
    step = min_p / NPP
    if True:
        s1 = spring_chain(config["init"], config["masses"], config["isprings"])
    #s1 = spring_chain(config["init"],config["masses"], config["isprings"])
    #s1.doit(config["tend"], h=config["step"])
    if False:
        s0 = split_string(config["step"], config["init"], config["masses"],
                          config["isprings"], k=config["k"])
        s0.doit(config["tend"], h=config["step"])
    # s0.doit(config["tend"], h=config["step"])
    t0 = np.linspace(10, s0.t[-1], 200)
    s0.sample(t0)
    py.figure(1)
tools/pygo/examples/wrappedconv.py  (view file @ 37a3d6a9)
@@ -6,61 +6,41 @@ import hashlib
implementation of a spring damper chain with alternating spring damper
constants.
"""
config = {}
N = 10
init = np.array([[1, 0, 0, 0]] + [[0, 0, 0, 0]] * (N - 1))
masses = np.array([[1, 2]] * N)
isprings = np.array([[1, 0]] * N)
max_samples = 1000
k = 3
mu = 1.0 / (1 / masses[0][0] + 1 / masses[0][1])
min_p, max_p = refchain.get_min_max_period(masses, isprings)
tend = 3 * max_p
config = {"N": int(N), "init": init, "masses": masses, "isprings": isprings,
          "k": float(k), "tend": float(tend),
          "min_period": float(min_p), "max_period": float(max_p), }
##
## We want to prune the following case:
##
orders = 1
factor = 5.0
npp = [10]
step = min_p / float(npp[0])
for f in range(1, orders + 1):
    for kinematic in [True, False]:
        for filtered in [True, False]:
            for parallel in [True, False]:
                # kinematic and not parallel or kinematic and filtered make no sense
                if not (kinematic and filtered) and not (kinematic and not parallel):
                    if not kinematic:
                        config["k"] = k
                        c = mu * (k * 2 * math.pi / step) ** 2
                        d = 2 * 0.7 * math.sqrt(c * mu)
                        csprings = [[c, d, 0, 0]] * N
                        csprings[0] = [0, 0, 0, 0]
                    else:
                        csprings = [[0, 0, 0, 0]] * N
                        config["k"] = float(0)
                    config["step"] = step
                    config["npp"] = npp[-1]
                    sim = chain_mass_springs(filtered, N, init, masses, isprings, csprings,
                                             kinematic=kinematic)
                    sim.annotations += ("confighash", hashlib.md5(str(config).encode("utf8")).hexdigest()),
                    sim.annotations += ("config", config),
                    try:
                        sim.simulate(tend, step, mode="mpi", parallel=parallel,
                                     datafile="chain", max_samples=max_samples)
                    except:
                        pass
    step /= factor
    npp += [npp[-1] * int(factor)]
def doit(N, fname, orders, k=3, factor=2, max_samples=1000):
    npp = 10  # safe reference start value
    ## get an initial estimate of the "reasonable" time step
    config = refchain.make_config_uniform(N, k=0.0, step=0.0)
    step = config["step"]
    print(step)
    for f in range(1, orders + 1):
        for kinematic in [True, False]:
            for filtered in [True, False]:
                for parallel in [True, False]:
                    if not (kinematic and filtered) and not (kinematic and not parallel):
                        k0 = float(0) if kinematic else k
                        config = refchain.make_config_uniform(N, k=float(0)) if kinematic else refchain.make_config_uniform(N, step=step, k=k)
                        sim = chain_mass_springs(filtered, N, config["init"], config["masses"],
                                                 config["isprings"], config["csprings"],
                                                 kinematic=kinematic)
                        sim.annotations += ("confighash", hashlib.md5(str(config).encode("utf8")).hexdigest()),
                        sim.annotations += ("config", config),
                        sim.annotations += ("npp", npp),
                        try:
                            sim.simulate(config["tend"], step, mode="mpi", parallel=parallel,
                                         datafile=fname, max_samples=max_samples)
                            refchain.get_pickle_data(config)
                        except:
                            pass
        step /= factor
        npp *= int(factor)

N = 10
k = 3
orders = 3
doit(N, "chain", orders, factor=2, max_samples=1000)
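doit sweeps every combination of kinematic/force-velocity coupling, filtered/plain wrappers, and parallel/sequential execution, pruning the combinations that make no sense (kinematic with filtering, and kinematic without parallel). A compact way to express the same sweep, shown here as a sketch with a stand-in run_case function that is not part of the repository:

from itertools import product

def run_case(kinematic, filtered, parallel):
    # Stand-in for building a config and calling sim.simulate(...).
    print("kinematic=%s filtered=%s parallel=%s" % (kinematic, filtered, parallel))

for kinematic, filtered, parallel in product([True, False], repeat=3):
    # Same pruning rule as the nested loops in the diff:
    # kinematic+filtered and kinematic+sequential are skipped.
    if (kinematic and filtered) or (kinematic and not parallel):
        continue
    run_case(kinematic, filtered, parallel)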
tools/pygo/examples/wrappedmodels.py  (view file @ 37a3d6a9)
@@ -5,7 +5,7 @@ import math
umit = "../../release/umit-fmus/"
pfmu = umit + "meWrapper/springs2/wrapper_springs2.fmu"
ffmu = umit + "meWrapperFiltered/springs2/wrapper_filtered_springs2.fmu"
-cfmu = umit + "gsl/clutch/clutch.fmu"
+cfmu = umit + "gsl2/clutch2/clutch2.fmu"

class clutch(module):
    """ Modules with two point masses hooked by a spring. This was first
@@ -78,7 +78,9 @@ class two_bodies(module):
### Now define simulations

-def make_parameters(filtered, N, init, masses, isprings, csprings, kinematic=False):
+def make_parameters(filtered, N, init, masses, isprings, csprings, kinematic=False, order=0):
    """
    Do this as a function so we can reuse this mess for both force-velocity
    and kinematic couplings. Both types of couplings are returned.
@@ -123,11 +125,13 @@ def make_parameters(filtered, N, init, masses, isprings, csprings, kinematic=Fal
            # couplings+= ("m%d" %i, "x1", "v1", "a1", "f1",
            #              "m%d" %(i+1), "x2","v2","a2","f2"),
    else:
-        for i in range(N - 1):
-            # this is hard wired to go left to right,
-            # output velocity, input forces
-            signals += ("m%d" % i, "out_v2", "m%d" % (i + 1), "in_v1"),
-            signals += ("m%d" % (i + 1), "out_f1", "m%d" % (i), "in_f2"),
+        if order == 0:
+            for i in range(N - 1):
+                signals += ("m%d" % i, "out_v2", "m%d" % (i + 1), "in_v1"),
+                signals += ("m%d" % (i + 1), "out_f1", "m%d" % (i), "in_f2"),
+        else:
+            signals += ("m%d" % i, "out_f2", "m%d" % (i + 1), "in_v1"),
+            signals += ("m%d" % (i + 1), "out_v1", "m%d" % (i), "in_v2"),

    return fmus, couplings, signals
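The new order parameter switches the direction of the force/velocity exchange between neighbouring FMUs: with order == 0 each module sends its right-end velocity to its neighbour and receives a force back, otherwise the roles are swapped. A tiny sketch that prints the signal tuples this wiring produces for a three-module chain; the module and port names follow the diff, while the helper itself is purely illustrative:

def chain_signals(N, order=0):
    """Return (source fmu, source port, target fmu, target port) tuples for a chain."""
    signals = ()
    for i in range(N - 1):
        if order == 0:
            signals += ("m%d" % i, "out_v2", "m%d" % (i + 1), "in_v1"),
            signals += ("m%d" % (i + 1), "out_f1", "m%d" % i, "in_f2"),
        else:
            signals += ("m%d" % i, "out_f2", "m%d" % (i + 1), "in_v1"),
            signals += ("m%d" % (i + 1), "out_v1", "m%d" % i, "in_v2"),
    return signals

for s in chain_signals(3):
    print(s)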
@@ -146,12 +150,13 @@ class chain_mass_springs(simulation):
    """
    def __init__(self, filtered, N, init, masses, isprings, csprings,
-                 kinematic=False, annotations=()):
+                 kinematic=False, annotations=(), order=0):
        fmus, couplings, signals = make_parameters(filtered, N, init, masses, isprings, csprings,
-                                                   kinematic=kinematic)
+                                                   kinematic=kinematic, order=order)
        simulation.__init__(self, fmus,
                            name="me_wrapped_spring_damper",
tools/pygo/examples/wrappedplot.py  (view file @ 37a3d6a9)
@@ -7,8 +7,8 @@ import pickle as pickle
import numpy as np

eps = 1e-16
## this to get proper labels on plots
rcParams['text.latex.preamble'] = ["\\usepackage{amsmath}"]
rcParams['text.latex.preview'] = False
rcParams['text.latex.unicode'] = False
@@ -16,25 +16,32 @@ rcParams['text.usetex'] = True
def count_steps(data):
    """ Determines how many different communication steps there are """
    d = {}
    for i in data:
        d[i.get_attr("comm_step")] = i.get_attr("comm_step")
    return len(d)

def get_length(s):
-    x = re.search("[\d]+$", s)
-    # there can be only one or no match here
-    n = 0
-    if x:
-        n = int(x.group(0))
-    return n
+    """ Determine the length of the string from it's name which is
+    prepended as:
+        "foobar%d" % N
+    """
+    x = re.search("[\d]+$", s)
+    # there can be only one or no match here
+    n = 0
+    if x:
+        n = int(x.group(0))
+    return n
class match_config():
    """Provide a functor to see if two configurations are equivalent for a
    given number of fields. This is used when traversing the H5 file to
    select simulations to be analyzed.

    In the constructor, `c' is the reference config and `f' is a *list* of
    fields to match.

    There is an issue: there is no == operator for ndarrays at least, but
    other objects might also have the same problem.
    """
    def __init__(self, c, f):
        self.config = c
        self.fields = f   # a list
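The docstring points at the core difficulty: `==` on a dict that contains NumPy arrays yields element-wise boolean arrays rather than a single truth value, so configs cannot be compared naively. A hedged sketch of a field-wise comparison that works around this with np.array_equal; the helper name is illustrative and is not the class in the diff:

import numpy as np

def configs_match(a, b, fields):
    """Compare two config dicts on the given fields, tolerating ndarray values."""
    for key in fields:
        va, vb = a.get(key), b.get(key)
        if isinstance(va, np.ndarray) or isinstance(vb, np.ndarray):
            if not np.array_equal(va, vb):   # collapses element-wise comparison to one bool
                return False
        elif va != vb:
            return False
    return True

a = {"N": 4, "masses": np.array([[1, 2]] * 4)}
b = {"N": 4, "masses": np.array([[1, 2]] * 4)}
print(configs_match(a, b, ["N", "masses"]))   # True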
@@ -64,64 +71,36 @@ def get_simulations(fname, config ):
    initial conditions, spring constants, and 'k' factor.
    """
    f = tb.open_file(fname)
    ## kinematic coupling implies k == float(0) by definition so the
    ## 'config' dictionary for these simulation always has k == float(0)
    ## which means that if we want to match a given config, then we have to
    ## ignore k
    match_conf = match_config(config, ["N", "masses", "isprings"])
    kin = sorted(package_simulations(
        mlocate_nodes(f.root, "Group",
                      conds=[match_value("kinematic", "coupling"),
                             match_value(match_conf, "config"), ]),
        f, close_file=False),
        key=lambda y: float(y.get_attr("comm_step")), reverse=True)
    match_conf.update("k")
    filtered_parallel = sorted(package_simulations(
        mlocate_nodes(f.root, "Group",
                      conds=[match_value("signals", "coupling"),
                             #match_value(match_conf, "config"),
                             match_value(re.compile("filtered_.*"), "variant"),
                             match_value(re.compile("True"), "parallel"), ]),
        f, close_file=False),
        key=lambda y: float(y.get_attr("comm_step")), reverse=True)
    filtered_sequential = sorted(package_simulations(
        mlocate_nodes(f.root, "Group",
                      conds=[#match_value(match_conf, "config"),
                             match_value("signals", "coupling"),
                             match_value(re.compile("filtered_.*"), "variant"),
                             match_value(re.compile("False"), "parallel"), ]),
        f, close_file=False),
        key=lambda y: float(y.get_attr("comm_step")), reverse=True)
    plain_parallel = sorted(package_simulations(
        mlocate_nodes(f.root, "Group",
                      conds=[#match_value(match_conf, "config"),
                             match_value("signals", "coupling"),
                             match_value(re.compile("plain_.*"), "variant"),
                             match_value(re.compile("[Tt]rue"), "parallel"), ]),
        f, close_file=False),
        key=lambda y: float(y.get_attr("comm_step")), reverse=True)
    plain_sequential = sorted(package_simulations(
        mlocate_nodes(f.root, "Group",
                      conds=[match_value("signals", "coupling"),
                             #match_value(match_conf, "config"),
                             match_value(re.compile("plain_.*"), "variant"),
                             match_value(re.compile("False"), "parallel"), ]),
        f, close_file=True),
        key=lambda y: float(y.get_attr("comm_step")), reverse=True)
    return [kin, filtered_parallel, filtered_sequential, plain_parallel, plain_sequential]
    sims = {}
    simsref = {}
    # do kinematic separately to avoid crazy logic
    mc = match_config(config, ["N", "masses", "isprings"])
    cc = [match_value(mc, "config")]
    sims["kinematic"] = sorted(package_simulations(
        mlocate_nodes(f.root, "Group", conds=cc),
        f, close_file=False),
        key=lambda y: float(y.get_attr("comm_step")), reverse=True)
    simsref["kinematic"] =
    # need the `k' parameter for the other ones
    mc.update("k")
    for i in ["filtered_", "plain_"]:
        for k in [True, False]:   # parallel or not
            # assemble the filter line
            choose = cc + [match_value("signals", "coupling"),
                           match_value(re.compile(str(k)), "parallel"),
                           match_value(re.compile("%s.*" % i, re.I), "variant")]
            key = i + ("parallel" if k else "sequential")
            sims[key] = sorted(package_simulations(
                mlocate_nodes(f.root, "Group", conds=choose),
                f, close_file=False),
                key=lambda y: float(y.get_attr("comm_step")), reverse=True)
    f.close()
    return sims
def get_violation(sim):
    """ This has a lot of hard coded stuff and should be fixed.
    What it does here is provide a time sequence of
    """
    n = len(sim)
    dx = 0.
    dv = 0.
@@ -154,6 +133,7 @@ def rms_error(pos, f, l):
def convergence(f, l):
    eps = 1e-16   # needed for loglog plots
    lgd = []
    figure(f)
    clf()
tools/pygo/lib/h5load.py  (view file @ 37a3d6a9)
@@ -160,14 +160,20 @@ class attributes():
        return None

class sim_data():
    """
    This stores the data found in the HDF5 file and provides a few
    services.
    """
    def __init__(self, sim, fmus, metadata):
        self.sim = sim
        self.fmus = fmus
        self.metadata = metadata
        ## constraint violations are stored as eqXXX_g
        pos = re.compile("eq[\d]+_g")
        l = [x for x in list(self.sim.dtype.names) if pos.fullmatch(x)]
        self.nc = len(l)
        self.pos = sim[l]
        ## constraint velocity are stored as eqXXX_g
        vel = re.compile("eq[\d]+_gv")
        l = [x for x in list(self.sim.dtype.names) if vel.fullmatch(x)]
        self.vel = sim[l]
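The constructor selects the constraint-violation and constraint-velocity columns from the HDF5 record array by matching field names of the form eqNNN_g and eqNNN_gv. A small standalone sketch of that selection on a NumPy structured array; the field names and values are invented for the example:

import re
import numpy as np

# A toy record array standing in for the simulation table.
sim = np.zeros(3, dtype=[("t", "f8"), ("eq0_g", "f8"), ("eq1_g", "f8"),
                         ("eq0_gv", "f8"), ("eq1_gv", "f8")])

pos = re.compile(r"eq[\d]+_g")
g_cols = [name for name in sim.dtype.names if pos.fullmatch(name)]
print(g_cols)                    # ['eq0_g', 'eq1_g'] -- fullmatch excludes the _gv columns
print(sim[g_cols].dtype.names)   # the selected sub-table keeps only those fields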
@@ -195,8 +201,7 @@ class sim_data():
    def set_attr(self, name, v):
        self.metadata[name] = v

    def get_n_constraints(self):
-        return nc
+        return self.nc

    def get_rms(self, x):
        return np.sqrt(
            np.power(x.view((x.dtype[0], len(x.dtype.names))), 2).mean(axis=1)
        ).view()
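get_rms reinterprets the homogeneous structured array as a plain 2-D float array via .view and then takes the root-mean-square across the fields for every time sample. A compact, self-contained illustration of the same idea; the array contents are made up:

import numpy as np

# Structured array with two constraint columns and three time samples.
x = np.array([(3.0, 4.0), (1.0, 1.0), (0.0, 2.0)],
             dtype=[("eq0_g", "f8"), ("eq1_g", "f8")])

# View the homogeneous record as an (n_samples, n_fields) float array,
# then take the RMS over the fields for each row.
flat = x.view((x.dtype[0], len(x.dtype.names)))
rms = np.sqrt(np.power(flat, 2).mean(axis=1))
print(rms)   # [3.53553391 1.         1.41421356]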