Commit 1354f07

Moving to require pandas 0.12.0, but otherwise keeping the default dependency versions shipped with Ubuntu 12.04.

1 parent 1299bbf · commit 1354f07

9 files changed: +134 -186 lines

.travis.yml (+1 -1)

@@ -7,7 +7,7 @@ before_install:
   - sudo apt-get install -y -qq python-setuptools python-numpy python-scipy python-matplotlib python-nose python-coverage
   - pip install cython pyyaml
   - git clone git://github.com/pydata/pandas.git
-  - cd pandas && git checkout v0.11.0 && python setup.py install && cd ..
+  - cd pandas && git checkout v0.12.0 && python setup.py install && cd ..
   - pip freeze
 install:
   - python setup.py install
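
Not part of this diff, but a minimal sketch of how the new pandas >= 0.12.0
requirement could also be checked at import time; the LooseVersion guard and
the error message are illustrative assumptions, not code from this repository:

    # Hypothetical version guard (not in this commit): fail early when the
    # installed pandas is older than the 0.12.0 release required above.
    from distutils.version import LooseVersion

    import pandas

    if LooseVersion(pandas.__version__) < LooseVersion('0.12.0'):
        raise ImportError('pandas >= 0.12.0 is required, found '
                          '{}'.format(pandas.__version__))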

Vagrantfile (+4 -102)

@@ -10,111 +10,13 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
   # please see the online documentation at vagrantup.com.

   # Every Vagrant virtual environment requires a box to build off of.
-  config.vm.box = "saucy64"
+  #config.vm.box = "saucy64"
+  config.vm.box = "precise32"

-  config.vm.provision :shell, :path => "bootstrap.sh"
+  config.vm.provision :shell, :path => "precise-bootstrap.sh"

   # The url from where the 'config.vm.box' box will be fetched if it
   # doesn't already exist on the user's system.
-  config.vm.box_url = "http://cloud-images.ubuntu.com/vagrant/saucy/current/saucy-server-cloudimg-amd64-vagrant-disk1.box"
+  config.vm.box_url = "http://cloud-images.ubuntu.com/vagrant/precise/current/precise-server-cloudimg-i386-vagrant-disk1.box"

-  # Create a forwarded port mapping which allows access to a specific port
-  # within the machine from a port on the host machine. In the example below,
-  # accessing "localhost:8080" will access port 80 on the guest machine.
-  # config.vm.network :forwarded_port, guest: 80, host: 8080
-
-  # Create a private network, which allows host-only access to the machine
-  # using a specific IP.
-  # config.vm.network :private_network, ip: "192.168.33.10"
-
-  # Create a public network, which generally matched to bridged network.
-  # Bridged networks make the machine appear as another physical device on
-  # your network.
-  # config.vm.network :public_network
-
-  # If true, then any SSH connections made will enable agent forwarding.
-  # Default value: false
-  # config.ssh.forward_agent = true
-
-  # Share an additional folder to the guest VM. The first argument is
-  # the path on the host to the actual folder. The second argument is
-  # the path on the guest to mount the folder. And the optional third
-  # argument is a set of non-required options.
-  # config.vm.synced_folder "../data", "/vagrant_data"
-
-  # Provider-specific configuration so you can fine-tune various
-  # backing providers for Vagrant. These expose provider-specific options.
-  # Example for VirtualBox:
-  #
-  # config.vm.provider :virtualbox do |vb|
-  #   # Don't boot with headless mode
-  #   vb.gui = true
-  #
-  #   # Use VBoxManage to customize the VM. For example to change memory:
-  #   vb.customize ["modifyvm", :id, "--memory", "1024"]
-  # end
-  #
-  # View the documentation for the provider you're using for more
-  # information on available options.
-
-  # Enable provisioning with Puppet stand alone. Puppet manifests
-  # are contained in a directory path relative to this Vagrantfile.
-  # You will need to create the manifests directory and a manifest in
-  # the file base.pp in the manifests_path directory.
-  #
-  # An example Puppet manifest to provision the message of the day:
-  #
-  # # group { "puppet":
-  # #   ensure => "present",
-  # # }
-  # #
-  # # File { owner => 0, group => 0, mode => 0644 }
-  # #
-  # # file { '/etc/motd':
-  # #   content => "Welcome to your Vagrant-built virtual machine!
-  # #               Managed by Puppet.\n"
-  # # }
-  #
-  # config.vm.provision :puppet do |puppet|
-  #   puppet.manifests_path = "manifests"
-  #   puppet.manifest_file = "init.pp"
-  # end
-
-  # Enable provisioning with chef solo, specifying a cookbooks path, roles
-  # path, and data_bags path (all relative to this Vagrantfile), and adding
-  # some recipes and/or roles.
-  #
-  # config.vm.provision :chef_solo do |chef|
-  #   chef.cookbooks_path = "../my-recipes/cookbooks"
-  #   chef.roles_path = "../my-recipes/roles"
-  #   chef.data_bags_path = "../my-recipes/data_bags"
-  #   chef.add_recipe "mysql"
-  #   chef.add_role "web"
-  #
-  #   # You may also specify custom JSON attributes:
-  #   chef.json = { :mysql_password => "foo" }
-  # end
-
-  # Enable provisioning with chef server, specifying the chef server URL,
-  # and the path to the validation key (relative to this Vagrantfile).
-  #
-  # The Opscode Platform uses HTTPS. Substitute your organization for
-  # ORGNAME in the URL and validation key.
-  #
-  # If you have your own Chef Server, use the appropriate URL, which may be
-  # HTTP instead of HTTPS depending on your configuration. Also change the
-  # validation key to validation.pem.
-  #
-  # config.vm.provision :chef_client do |chef|
-  #   chef.chef_server_url = "https://api.opscode.com/organizations/ORGNAME"
-  #   chef.validation_key_path = "ORGNAME-validator.pem"
-  # end
-  #
-  # If you're using the Opscode platform, your validator client is
-  # ORGNAME-validator, replacing ORGNAME with your organization name.
-  #
-  # If you have your own Chef Server, the default validation client name is
-  # chef-validator, unless you changed the configuration.
-  #
-  # chef.validation_client_name = "ORGNAME-validator"
 end

dtk/test/test_process.py (+2 -2)

@@ -20,8 +20,8 @@ def test_coefficient_of_determination():
     # define a simple line with some measured data points
     num_samples = 100
     x = np.arange(num_samples)
-    slope = np.random.choice([-1.0, 1.0]) * float(np.random.randint(1, 100))
-    intercept = np.random.choice([-1.0, 1.0]) * float(np.random.randint(1, 100))
+    slope = np.random.uniform(-100, 100)
+    intercept = np.random.uniform(-100, 100)
     y = slope * x + intercept
     # add some noise to each to create fake measurements
     x_measured = x + 0.001 * np.random.random(num_samples)
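
The switch away from np.random.choice here (and in test_walk.py below) is
presumably because numpy.random.choice only exists in NumPy >= 1.7, while the
default python-numpy shipped with Ubuntu 12.04 is older. A minimal standalone
sketch of the replacement draw, reusing the values from the test above:

    import numpy as np

    # Old form, requires NumPy >= 1.7 for np.random.choice:
    #   slope = np.random.choice([-1.0, 1.0]) * float(np.random.randint(1, 100))
    # New form: a single uniform draw over [-100, 100) works on older NumPy.
    slope = np.random.uniform(-100, 100)
    intercept = np.random.uniform(-100, 100)

    x = np.arange(100)
    y = slope * x + intercept  # the exact line used as ground truth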

dtk/test/test_walk.py (+73 -78)

@@ -3,6 +3,7 @@
 
 # builtin
 import os
+from time import strptime
 
 # external
 import numpy as np
@@ -26,6 +27,17 @@
 set_trace = Tracer()
 
 
+def compare_data_frames(actual, expected, rtol=1e-7, atol=0.0):
+    """Compares two data frames column by column for numerical
+    equivalence."""
+
+    # Make sure all columns are present.
+    assert sorted(list(expected.columns)) == sorted(list(actual.columns))
+
+    for col in expected.columns:
+        testing.assert_allclose(actual[col], expected[col], rtol, atol)
+
+
 def test_find_constant_speed():
 
     speed_array = np.loadtxt(os.path.join(os.path.dirname(__file__),
@@ -68,65 +80,6 @@ def test_interpolate():
 
 
 class TestDFlowData():
-    """we need class that deals with d flow data and running the hbm command
-    line program
-
-    input
-    -----
-
-    mocap module file path (or handle): times series of markers, force plate
-    stuff, analog measurements this is basically at 100 hz, but not exactly
-
-    record module file path (or handle): variable sample rate of treadmill
-    motions (or any other variables from dflow) and events
-
-    mocap compensation file path: with no loads for compensation
-
-    meta data file path (or handle)
-
-    output
-    ------
-    pandas data frame with all measurements from both files at 100 hertz,
-    missing values set at NA,
-
-    time start from 0 for all?
-
-    event times?
-
-    actions
-    -------
-
-    look for metadata in directory of the files (search up the hierarchy?)
-    parse meta data
-    store meta data as a dictionary
-
-    parse record file for event times
-    load time series from record file into data frame
-    interpolate data in record file at 100 hertz
-
-    load time series from mocap file into data frame
-    identify missing markers
-
-    load the compensation file
-    compute the compensatation for the forces
-
-    generate hbm output with the mocap file
-    load hbm outputs into data frames
-    interpolate at 100 hertz
-
-    join all off the data frames in to one data frame at 100 hz sample rate
-    with the time stamp starting at zero as the index
-
-    attributes
-    ----------
-
-    record module data path (or file handle)
-    mocap module data path (or file handle)
-    meta data file path (or file handle)
-    hbmtest configuration details
-
-
-    """
 
     cortex_start_frame = 2375
     cortex_sample_period = 0.01
@@ -149,6 +102,7 @@ class TestDFlowData():
                            'Channel2.Anlg']
     dflow_hbm_labels = ['RKneeFlexion.Ang',
                         'RKneeFlexion.Mom',
+                        'RKneeFlexion.Pow',
                         'R_PectoralisMajorTH1',
                         'L_RectusFemoris']
     mocap_labels_without_hbm = (['TimeStamp', 'FrameNumber'] +
@@ -160,15 +114,58 @@ class TestDFlowData():
     path_to_record_data_file = 'example_record_tsv_file.txt'
     path_to_meta_data_file = 'example_meta_data_file.yml'
 
-    meta_data = {'date': '2013-10-3',
-                 'trial number': 5,
-                 'project': 'projecta',
+    meta_data = {'trial': {'id': 5,
+                           'datetime': strptime('2013-10-03', "%Y-%m-%d")},
+                 'subject': {'id': 234,
+                             'age': 28,
+                             'mass': 70,
+                             'mass-units': 'kilogram'},
+                 'study': {'id': 12,
+                           'name': 'Human Locomotion Control Identification',
+                           'description': 'Perturbations during walking and running.'},
+                 'files': [path_to_mocap_data_file,
+                           path_to_record_data_file],
                  'events': {'A': 'Zeroing',
                             'B': 'Walking',
                             'C': 'Relaxing'},
+                 'units': {
+                     '.*\.Pos[XYZ]$': 'meters',
+                     '^[LR]_.*': 'newtons',
+                     '.*\.Mom$': 'newton-meters',
+                     '.*\.Ang$': 'degrees',
+                     '.*\.Pow$': 'watts'
+                     },
+                 'analog-channel-names': {
+                     "Channel1.Anlg": "F1Y1",
+                     "Channel2.Anlg": "F1Y2",
+                     "Channel3.Anlg": "F1Y3",
+                     "Channel4.Anlg": "F1X1",
+                     "Channel5.Anlg": "F1X2",
+                     "Channel6.Anlg": "F1Z1",
+                     "Channel7.Anlg": "F2Y1",
+                     "Channel8.Anlg": "F2Y2",
+                     "Channel9.Anlg": "F2Y3",
+                     "Channel10.Anlg": "F2X1",
+                     "Channel11.Anlg": "F2X2",
+                     "Channel12.Anlg": "F2Z1",
+                     "Channel13.Anlg": "Front_Left_EMG",
+                     "Channel14.Anlg": "Front_Left_AccX",
+                     "Channel15.Anlg": "Front_Left_AccY",
+                     "Channel16.Anlg": "Front_Left_AccZ",
+                     "Channel17.Anlg": "Back_Left_EMG",
+                     "Channel18.Anlg": "Back_Left_AccX",
+                     "Channel19.Anlg": "Back_Left_AccY",
+                     "Channel20.Anlg": "Back_Left_AccZ",
+                     "Channel21.Anlg": "Front_Right_EMG",
+                     "Channel22.Anlg": "Front_Right_AccX",
+                     "Channel23.Anlg": "Front_Right_AccY",
+                     "Channel24.Anlg": "Front_Right_AccZ",
+                     "Channel25.Anlg": "Back_Right_EMG",
+                     "Channel26.Anlg": "Back_Right_AccX",
+                     "Channel27.Anlg": "Back_Right_AccY",
+                     "Channel28.Anlg": "Back_Right_AccZ",
+                     }
                  }
-    # TODO : Add names for analog columns in DFlow because you can't name
-    # analog signals uniquely in dflow
 
     def create_sample_mocap_file(self):
         """
@@ -199,8 +196,9 @@ def create_sample_mocap_file(self):
         # This generates the slightly variable sampling periods seen in the
         # time stamp column.
         deviations = (self.dflow_mocap_max_period_deviation *
-                      np.random.choice([-1.0, 1.0]) *
-                      np.random.random(self.cortex_number_of_samples))
+                      np.random.uniform(-1.0, 1.0,
+                                        self.cortex_number_of_samples))
+
         variable_periods = (self.cortex_sample_period *
                             np.ones(self.cortex_number_of_samples) +
                             deviations)
@@ -262,19 +260,20 @@ def create_sample_record_file(self):
             np.random.random(self.dflow_record_number_of_samples)
 
         self.record_data_frame = pandas.DataFrame(record_data)
+        # TODO : Pandas 0.11.0 does not have a cols argument.
+        # http://pandas.pydata.org/pandas-docs/version/0.10.1/generated/pandas.Series.to_csv.html
         self.record_data_frame.to_csv(self.path_to_record_data_file,
                                       sep='\t', float_format='%1.6f',
                                       index=False, cols=['Time',
                                                          'LeftBeltSpeed',
                                                          'RightBeltSpeed'])
-
         event_template = "#\n# EVENT {} - COUNT {}\n#\n"
 
         time = self.record_data_frame['Time']
 
-        event_times = {'A': np.random.choice(time),
-                       'B': np.random.choice(time),
-                       'C': np.random.choice(time)}
+        event_times = {'A': time[333],
+                       'B': time[784],
+                       'C': time[955]}
 
         # This loops through the record file and inserts the events.
         new_lines = ''
@@ -445,9 +444,7 @@ def test_load_mocap_data(self):
         dflow_data._store_hbm_column_labels(dflow_data.mocap_column_labels)
         raw_mocap_data = dflow_data._load_mocap_data()
 
-        testing.assert_allclose(raw_mocap_data.sort(axis=1).values,
-                                self.mocap_data_frame.sort(axis=1).values,
-                                atol=1e-6)
+        compare_data_frames(raw_mocap_data, self.mocap_data_frame, atol=1e-6)
 
         # TODO : Add some missing values into the HBM columns of
         # self.mocap_data_frame and make sure they get replaced with NaN.
@@ -456,8 +453,7 @@ def test_load_mocap_data(self):
 
         expected = self.mocap_data_frame[self.mocap_labels_without_hbm]
 
-        testing.assert_allclose(raw_mocap_data.sort(axis=1).values,
-                                expected.sort(axis=1).values, atol=1e-6)
+        compare_data_frames(raw_mocap_data, expected, atol=1e-6)
 
     def test_extract_events_from_record_file(self):
         pass
@@ -466,9 +462,8 @@ def test_load_record_data(self):
         dflow_data = DFlowData(record_tsv_path=self.path_to_record_data_file)
         raw_record_data = dflow_data._load_record_data()
 
-        testing.assert_allclose(raw_record_data.sort(axis=1).values,
-                                self.record_data_frame.sort(axis=1).values,
-                                atol=1e-6)
+        compare_data_frames(raw_record_data, self.record_data_frame,
+                            atol=1e-6)
 
     def test_resample_record_data(self):
         dflow_data = DFlowData(self.path_to_mocap_data_file,
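
The new compare_data_frames helper above replaces the sort(axis=1)-based
value comparisons. A small standalone sketch of why the column-by-column
check is more robust; the example frame below is made up for illustration:

    import numpy as np
    import pandas
    from numpy import testing

    def compare_data_frames(actual, expected, rtol=1e-7, atol=0.0):
        """Compares two data frames column by column for numerical
        equivalence."""
        assert sorted(list(expected.columns)) == sorted(list(actual.columns))
        for col in expected.columns:
            testing.assert_allclose(actual[col], expected[col], rtol, atol)

    # Identical data with a different column order still passes, because each
    # column is matched by name rather than by position after sorting values.
    expected = pandas.DataFrame({'TimeStamp': np.linspace(0.0, 1.0, 11),
                                 'Channel1.Anlg': np.random.random(11)})
    actual = expected[['Channel1.Anlg', 'TimeStamp']]
    compare_data_frames(actual, expected, atol=1e-6)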

dtk/walk.py (+2 -1)

@@ -143,7 +143,8 @@ def _store_hbm_column_labels(self, mocap_column_labels):
         self.hbm_column_labels = []
         reg_exps = [re.compile('^[LR]_.*'),
                     re.compile('.*\.Mom$'),
-                    re.compile('.*\.Ang$')]
+                    re.compile('.*\.Ang$'),
+                    re.compile('.*\.Pow$')]
         for i, label in enumerate(mocap_column_labels):
             if any(exp.match(label) for exp in reg_exps):
                 self.hbm_indices.append(i)
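
The extra pattern makes the HBM column filter pick up the new power (".Pow")
signals alongside the angle and moment columns. A self-contained sketch of
what the extended pattern list matches; the label list below is made up,
loosely following the test labels above:

    import re

    reg_exps = [re.compile('^[LR]_.*'),
                re.compile('.*\.Mom$'),
                re.compile('.*\.Ang$'),
                re.compile('.*\.Pow$')]  # newly matched power columns

    labels = ['TimeStamp', 'RKneeFlexion.Ang', 'RKneeFlexion.Mom',
              'RKneeFlexion.Pow', 'R_PectoralisMajorTH1', 'Channel1.Anlg']

    hbm_labels = [label for label in labels
                  if any(exp.match(label) for exp in reg_exps)]
    # hbm_labels -> ['RKneeFlexion.Ang', 'RKneeFlexion.Mom',
    #                'RKneeFlexion.Pow', 'R_PectoralisMajorTH1']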
