repo stringlengths 2 99 | file stringlengths 13 225 | code stringlengths 0 18.3M | file_length int64 0 18.3M | avg_line_length float64 0 1.36M | max_line_length int64 0 4.26M | extension_type stringclasses 1
value |
|---|---|---|---|---|---|---|
ChromaStarPy | ChromaStarPy-master/HjertingComponents.py | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 29 13:21:01 2017
@author: Ian
"""
def hjertingComponents():
"""//Hjerting function components (expansion coefficients in Voigt fn "a" parameter):
// Observation and Analysis of Stellar Photospehres -, 3rd. Ed., Tab 11.5, p. 256
// David F. Gray"""
#... | 14,931 | 131.141593 | 177 | py |
ChromaStarPy | ChromaStarPy-master/LineProf.py | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 29 14:26:42 2017
@author: Ian
"""
import math
import Useful
import ToolBox
"""/**
* Line profile, phi_lambda(lambda): Assume Voigt function profile - need H(a,v)
* Assumes CRD, LTE, ??? Input parameters: lam0 - line center wavelength in nm
* mass - mass of absorbing p... | 30,137 | 42.17765 | 162 | py |
ChromaStarPy | ChromaStarPy-master/SpecSyn2.py | # -*- coding: utf-8 -*-
"""
Created on Fri Apr 28 17:03:30 2017
@author: ishort
"""
#/**
# *
# * Create master kappa_lambda(lambda) and tau_lambda(lambda) for
# * FormalSoln.formalSoln()
# *
# * @author Ian
# */
import math
import ToolBox
#plotting:
import matplotlib
import pylab
import numpy
def masterLambda(numLa... | 5,805 | 39.601399 | 140 | py |
ChromaStarPy | ChromaStarPy-master/ToolBox.py | # -*- coding: utf-8 -*-
"""
Created on Fri Apr 21 10:41:24 2017
Collection of useful utilities
@author: ishort
"""
import math
import numpy
#JB#
#a function to create a cubic function fit extrapolation
def cubicFit(x,y):
coeffs = numpy.polyfit(x,y,3)
#returns an array of coefficents for the cub... | 7,243 | 24.687943 | 144 | py |
miccai2022-roigan | miccai2022-roigan-main/main.py | import os
import argparse
import yaml
import collections
import itertools
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision import datasets
from sklearn.model_selection import train_test_split
from src import models, utils
def main(arg... | 10,158 | 36.487085 | 201 | py |
miccai2022-roigan | miccai2022-roigan-main/src/utils.py | import os
import sys
import h5py
import random
import numpy as np
import pandas as pd
import torch
from torch.autograd import Variable
from torchvision.utils import save_image, make_grid
from torch.utils.data import DataLoader
def write_flush(*text_args, stream=sys.stdout):
stream.write(', '.join(map(str, text_... | 7,345 | 32.543379 | 124 | py |
miccai2022-roigan | miccai2022-roigan-main/src/models.py | import torch
import torch.nn as nn
from torchvision.ops import RoIAlign
def weights_init_normal(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
torch.nn.init.normal_(m.weight.data, 0.0, 0.02)
if hasattr(m, 'bias') and m.bias is not None:
torch.nn.init.constant... | 5,076 | 31.132911 | 99 | py |
miccai2022-roigan | miccai2022-roigan-main/data/library_utils.py | import os
import sys
import h5py
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from cv2 import addWeighted
from PIL import Image, ImageDraw
from skimage.color import rgb2gray
from skimage.filters import gaussian, threshold_otsu
from skimage.morphology import closing, dilation, disk
from skima... | 3,897 | 28.755725 | 83 | py |
hgp | hgp-main/setup.py | from setuptools import setup, find_packages
setup(
name="hgp",
version="0.0.1",
author="Magnus Ross",
author_email="[email protected]",
url="https://github.com/magnusross/hgp",
packages=find_packages(),
python_requires=">=3.9",
)
| 279 | 22.333333 | 57 | py |
hgp | hgp-main/hgp/__init__.py | import hgp.models
import hgp.misc
import hgp.datasets
import hgp.core
| 70 | 13.2 | 19 | py |
hgp | hgp-main/hgp/core/kernels.py | # MIT License
# Copyright (c) 2021 Pashupati Hegde.
# Copyright (c) 2023 Magnus Ross.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the r... | 8,840 | 34.939024 | 121 | py |
hgp | hgp-main/hgp/core/flow.py | # MIT License
# Copyright (c) 2021 Pashupati Hegde.
# Copyright (c) 2023 Magnus Ross.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the r... | 4,501 | 36.831933 | 115 | py |
hgp | hgp-main/hgp/core/constraint_likelihoods.py | import torch
import torch.nn as nn
from torch import distributions
from torch.nn import init
from hgp.misc.constraint_utils import invsoftplus, softplus
class Gaussian(nn.Module):
"""
Gaussian likelihood with an optionally trainable scale parameter
"""
def __init__(
self, d: int = 1, scale: ... | 2,282 | 29.44 | 85 | py |
hgp | hgp-main/hgp/core/dsvgp.py | # MIT License
# Copyright (c) 2021 Pashupati Hegde.
# Copyright (c) 2023 Magnus Ross.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the r... | 12,813 | 39.169279 | 138 | py |
hgp | hgp-main/hgp/core/nn.py | import functorch
import torch
from torch import nn
from hgp.misc.ham_utils import build_J
def Linear(chin, chout, zero_bias=False, orthogonal_init=False):
linear = nn.Linear(chin, chout)
if zero_bias:
torch.nn.init.zeros_(linear.bias)
if orthogonal_init:
torch.nn.init.orthogonal_(linear.w... | 1,637 | 25.852459 | 84 | py |
hgp | hgp-main/hgp/core/states.py | # MIT License
# Copyright (c) 2021 Pashupati Hegde.
# Copyright (c) 2023 Magnus Ross.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the r... | 10,358 | 31.990446 | 99 | py |
hgp | hgp-main/hgp/core/observation_likelihoods.py | # MIT License
# Copyright (c) 2021 Pashupati Hegde.
# Copyright (c) 2023 Magnus Ross.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the r... | 2,157 | 33.253968 | 88 | py |
hgp | hgp-main/hgp/models/sequence.py | # MIT License
# Copyright (c) 2021 Pashupati Hegde.
# Copyright (c) 2023 Magnus Ross.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the r... | 19,534 | 34.261733 | 118 | py |
hgp | hgp-main/hgp/models/initialization.py | # MIT License
# Copyright (c) 2021 Pashupati Hegde.
# Copyright (c) 2023 Magnus Ross.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the r... | 11,106 | 37.835664 | 96 | py |
hgp | hgp-main/hgp/models/__init__.py | import hgp.models.builder
import hgp.models.initialization
import hgp.models.sequence
| 86 | 20.75 | 32 | py |
hgp | hgp-main/hgp/models/builder.py | # MIT License
# Copyright (c) 2021 Pashupati Hegde.
# Copyright (c) 2023 Magnus Ross.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the r... | 31,938 | 30.716981 | 135 | py |
hgp | hgp-main/hgp/datasets/__init__.py | 0 | 0 | 0 | py | |
hgp | hgp-main/hgp/datasets/hamiltonians.py | # import numpy as np
import functorch
import numpy as np
import torch
from torchdiffeq import odeint
from hgp.misc.ham_utils import build_J
from hgp.misc.torch_utils import numpy2torch, torch2numpy
# from scipy.integrate import odeint
class Data:
def __init__(self, ys, ts):
self.ts = ts.astype(np.float3... | 6,888 | 26.890688 | 86 | py |
hgp | hgp-main/hgp/misc/plot_utils.py | from hgp.misc.torch_utils import torch2numpy, numpy2torch
import matplotlib.pyplot as plt
from matplotlib import cm
import matplotlib
import shutil
import numpy as np
from hgp.models.builder import compute_summary
def plot_predictions(data, test_pred, save=None, test_true=None, model_name="Model"):
test_ts, te... | 7,650 | 31.012552 | 88 | py |
hgp | hgp-main/hgp/misc/settings.py | import torch
import numpy
class Settings:
def __init__(self):
pass
@property
def torch_int(self):
return torch.int32
@property
def numpy_int(self):
return numpy.int32
@property
def device(self):
# return torch.device('cpu')
# return torch.device('... | 630 | 16.054054 | 77 | py |
hgp | hgp-main/hgp/misc/ham_utils.py | import torch
def build_J(D_in):
    """Build the canonical symplectic matrix J = [[0, I], [-I, 0]].

    :param D_in: total (even) phase-space dimension 2n.
    :return: a (D_in, D_in) float tensor with identity blocks off the diagonal.
    """
    assert D_in % 2 == 0
    half = D_in // 2
    eye = torch.eye(half)
    zero = torch.zeros((half, half))
    # Assemble the two block rows, then stack them vertically.
    top_row = torch.cat((zero, eye), dim=1)
    bottom_row = torch.cat((-eye, zero), dim=1)
    return torch.cat((top_row, bottom_row), dim=0)
| 241 | 21 | 47 | py |
hgp | hgp-main/hgp/misc/train_utils.py | import random
import numpy as np
import torch
def seed_everything(seed):
    """Seed every RNG in use (stdlib random, NumPy, torch CPU and CUDA) and
    enable deterministic torch kernels, for reproducible experiment runs.

    :param seed: integer seed applied to all libraries.
    """
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    # NOTE(review): manual_seed seeds the *current* CUDA device only (no-op
    # without CUDA); multi-GPU runs may want manual_seed_all — confirm.
    torch.cuda.manual_seed(seed)
    # May raise at runtime if a nondeterministic op is later executed;
    # presumably an accepted trade-off for reproducibility here.
    torch.use_deterministic_algorithms(True)
def get_logger(logpath, filepath, add_stdout=True):
logger = logging.getLogger()
l... | 2,731 | 23.836364 | 88 | py |
hgp | hgp-main/hgp/misc/torch_utils.py | from hgp.misc.settings import settings
import numpy as np
import torch
device = settings.device
dtype = settings.torch_float
def numpy2torch(x):
    """Return `x` on the module-configured device as a torch tensor.

    Inputs that are exactly `np.ndarray` (subclasses excluded — the check is
    `type(...) is`) are first converted with the module-level `dtype`; any
    other input is assumed to already be a tensor and is moved as-is.
    """
    if type(x) is np.ndarray:
        return torch.tensor(x, dtype=dtype).to(device)
    return x.to(device)
def torch2numpy(x):
return x if type(x) ... | 1,487 | 22.619048 | 85 | py |
hgp | hgp-main/hgp/misc/metrics.py | import numpy as np
from scipy.special import logsumexp
from scipy.stats import norm
def log_lik(actual, predicted, noise_var):
    """Monte-Carlo Gaussian log-likelihood of `actual` under `predicted` samples.

    :param actual: observed values, broadcastable against `predicted`.
    :param predicted: array whose axis 0 indexes predictive samples.
    :param noise_var: observation noise variance (std = sqrt(noise_var)).
    :return: log of the sample-averaged likelihood, reduced over axis 0.
    """
    n_samples = float(predicted.shape[0])
    per_sample = norm.logpdf(actual, loc=predicted, scale=noise_var**0.5)
    # Average in probability space via weighted log-sum-exp for stability.
    return logsumexp(per_sample, 0, b=1 / n_samples)
def mse(actual, predicted):
... | 561 | 27.1 | 82 | py |
hgp | hgp-main/hgp/misc/__init__.py | 0 | 0 | 0 | py | |
hgp | hgp-main/hgp/misc/constraint_utils.py | import torch
import torch.nn.functional as F
def softplus(x):
    """Softplus shifted by a tiny constant so outputs stay strictly positive.

    :param x: unconstrained tensor.
    :return: F.softplus(x) + 1e-12, elementwise.
    """
    eps = 1e-12
    return eps + F.softplus(x)
def invsoftplus(x):
    """Inverse of the shifted softplus: map a positive value back to its pre-activation.

    Uses softplus^-1(y) = y + log(1 - exp(-y)); the input is clamped to the
    dtype's machine epsilon after removing the 1e-12 shift, so values at or
    below the shift do not produce -inf.
    """
    eps = 1e-12
    tiny = torch.tensor(torch.finfo(x.dtype).eps).to(x)
    shifted = torch.max(x - eps, tiny)
    return shifted + torch.log(-torch.expm1(-shifted))
| 276 | 18.785714 | 75 | py |
hgp | hgp-main/hgp/misc/param.py | import numpy as np
import torch
from hgp.misc import transforms
from hgp.misc.settings import settings
class Param(torch.nn.Module):
"""
A class to handle contrained --> unconstrained optimization using variable transformations.
Similar to Parameter class in GPflow : https://github.com/GPflow/GPflow/blob... | 913 | 31.642857 | 103 | py |
hgp | hgp-main/hgp/misc/transforms.py | from hgp.misc.settings import settings
import numpy as np
import torch
import torch.nn.functional as F
class Identity:
def __init__(self):
pass
def __str__(self):
return "Identity transformation"
def forward_tensor(self, x):
return x
def backward_tensor(self, y):
re... | 3,936 | 27.323741 | 88 | py |
hgp | hgp-main/tests/test_kernels.py | import pytest
import hgp.core.kernels as kernels
import torch
@pytest.fixture()
def t():
return 1 * torch.randn(10, 4)
@pytest.fixture()
def kernel():
k = kernels.DerivativeRBF(4)
return k
def test_single_k(t, kernel):
K = kernel.K(t)
for i in range(10):
for j in range(10):
... | 3,261 | 24.286822 | 77 | py |
hgp | hgp-main/tests/__init__.py | 0 | 0 | 0 | py | |
hgp | hgp-main/experiments/initial_pendulum/experiment.py | import logging
import os
import hydra
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import torch
from matplotlib import cm
from matplotlib.collections import LineCollection
from matplotlib.legend import _get_legend_handles_labels
from omegaconf import DictConfig
import hgp
from hgp.datasets.ham... | 6,928 | 31.078704 | 86 | py |
hgp | hgp-main/experiments/forward_trajectory/experiment.py | import logging
import os
import pickle
from distutils.dir_util import copy_tree
from pathlib import Path
import hydra
import numpy as np
import torch
from omegaconf import DictConfig
import hgp
from hgp.datasets.hamiltonians import load_system_from_name
from hgp.misc.plot_utils import (
plot_comparison_traces,
... | 4,625 | 30.469388 | 83 | py |
hgp | hgp-main/experiments/multiple_trajectory/experiment.py | import logging
import os
import pickle
from distutils.dir_util import copy_tree
from pathlib import Path
import hydra
import numpy as np
import torch
from omegaconf import DictConfig
import hgp
from hgp.datasets.hamiltonians import load_system_from_name
from hgp.misc.plot_utils import (
plot_comparison_traces,
... | 5,012 | 29.944444 | 83 | py |
SubGNN | SubGNN-main/config.py |
from pathlib import Path
# directory where data and results will be stored
# TODO: *UPDATE TO YOUR DIRECTORY OF CHOICE*
PROJECT_ROOT = Path('/mnt/subgraphs/data_to_release')
# padding
PAD_VALUE = 0
| 202 | 17.454545 | 53 | py |
SubGNN | SubGNN-main/SubGNN/test.py | import sys
sys.path.insert(0, '..') # add config to path
import config
import train as tr
import os
import json
import random
import numpy as np
import argparse
class Namespace:
    """Lightweight attribute bag: ``Namespace(a=1).a == 1``."""

    def __init__(self, **kwargs):
        # Expose every keyword argument as an instance attribute,
        # mirroring argparse.Namespace.
        for key, value in kwargs.items():
            setattr(self, key, value)
def parse_arguments():
parser = argparse.ArgumentParser(descripti... | 4,436 | 40.858491 | 130 | py |
SubGNN | SubGNN-main/SubGNN/anchor_patch_samplers.py | # General
import numpy as np
import random
from collections import defaultdict
import networkx as nx
import sys
import time
# Pytorch
import torch
# Our Methods
sys.path.insert(0, '..') # add config to path
import config
import subgraph_utils
#######################################################
# Triangular Rando... | 23,117 | 51.901602 | 179 | py |
SubGNN | SubGNN-main/SubGNN/subgraph_utils.py | # General
import typing
import sys
import numpy as np
#Networkx
import networkx as nx
# Sklearn
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.metrics import f1_score, accuracy_score
# Pytorch
import torch
import torch.nn.functional as F
import torch.nn as nn
from torch.nn.functional import one_h... | 10,206 | 41.886555 | 172 | py |
SubGNN | SubGNN-main/SubGNN/subgraph_mpn.py | # General
import numpy as np
import sys
from multiprocessing import Pool
import time
# Pytorch
import torch
import torch.nn as nn
import torch.nn.functional as F
# Pytorch Geometric
from torch_geometric.utils import add_self_loops
from torch_geometric.nn import MessagePassing
# Our methods
sys.path.insert(0, '..') #... | 12,371 | 50.123967 | 223 | py |
SubGNN | SubGNN-main/SubGNN/datasets.py | # Pytorch
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, Dataset
# Typing
from typing import List
class SubgraphDataset(Dataset):
'''
Stores subgraphs and their associated labels as well as precomputed similarities and border sets for the subgraphs
'''
def __init__(self, ... | 1,904 | 31.844828 | 130 | py |
SubGNN | SubGNN-main/SubGNN/train_config.py | # General
import numpy as np
import random
import argparse
import tqdm
import pickle
import json
import commentjson
import joblib
import os
import sys
import pathlib
from collections import OrderedDict
import random
import string
# Pytorch
import torch
from torch.utils.data import DataLoader
from torch.nn.functional i... | 11,383 | 39.226148 | 156 | py |
SubGNN | SubGNN-main/SubGNN/SubGNN.py | # General
import os
import numpy as np
from pathlib import Path
import typing
import time
import json
import copy
from typing import Dict, List
import multiprocessing
from multiprocessing import Pool
from itertools import accumulate
from collections import OrderedDict
import pickle
import sys
from functools import par... | 64,806 | 54.676117 | 326 | py |
SubGNN | SubGNN-main/SubGNN/gamma.py | # General
import sys
import time
import numpy as np
# Pytorch & Networkx
import torch
import networkx as nx
# Dynamic time warping
from fastdtw import fastdtw
# Our methods
sys.path.insert(0, '..') # add config to path
import config
###########################################
# DTW of degree sequences
def get_de... | 1,810 | 28.209677 | 126 | py |
SubGNN | SubGNN-main/SubGNN/attention.py | import torch
from torch.nn.parameter import Parameter
# All of the below code is taken from AllenAI's AllenNLP library
def tiny_value_of_dtype(dtype: torch.dtype):
"""
Returns a moderately tiny value for a given PyTorch data type that is used to avoid numerical
issues such as division by zero.
This is... | 6,880 | 47.801418 | 107 | py |
SubGNN | SubGNN-main/SubGNN/train.py | # General
import numpy as np
import random
import argparse
import tqdm
import pickle
import json
import joblib
import os
import time
import sys
import pathlib
import random
import string
# Pytorch
import torch
from torch.utils.data import DataLoader
from torch.nn.functional import one_hot
import pytorch_lightning as p... | 21,662 | 42.5 | 170 | py |
SubGNN | SubGNN-main/prepare_dataset/train_node_emb.py | # General
import numpy as np
import random
import argparse
import os
import config_prepare_dataset as config
import preprocess
import model as mdl
import utils
# Pytorch
import torch
from torch_geometric.utils.convert import to_networkx, to_scipy_sparse_matrix
from torch_geometric.data import Data, DataLoader, Neighb... | 9,326 | 48.611702 | 334 | py |
SubGNN | SubGNN-main/prepare_dataset/utils.py | # General
import random
import numpy as np
# Pytorch
import torch
import torch.nn.functional as F
from torch.nn import Sigmoid
from torch_geometric.data import Dataset
# Matplotlib
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
# Sci-kit Learn
from sklearn.metrics import ro... | 7,302 | 32.810185 | 144 | py |
SubGNN | SubGNN-main/prepare_dataset/model.py | # Pytorch
import torch
import torch.nn as nn
from torch.nn import Linear, LayerNorm, ReLU
from torch_geometric.nn import GINConv, GCNConv
import torch.nn.functional as F
# General
import numpy as np
import torch
import utils
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class TrainNet(nn.... | 1,045 | 27.27027 | 69 | py |
SubGNN | SubGNN-main/prepare_dataset/config_prepare_dataset.py | from pathlib import Path
import sys
sys.path.insert(0, '..') # add config to path
import config as general_config
# Output directory ('density' as an example)
DATASET_DIR = Path(general_config.PROJECT_ROOT) / "density"
# Flags
GENERATE_SYNTHETIC_G = True # whether to generate synthetic graph with below specified pro... | 1,940 | 28.409091 | 100 | py |
SubGNN | SubGNN-main/prepare_dataset/prepare_dataset.py | # General
import numpy as np
import random
import typing
import logging
from collections import Counter, defaultdict
import config_prepare_dataset as config
import os
if not os.path.exists(config.DATASET_DIR):
os.makedirs(config.DATASET_DIR)
import train_node_emb
# Pytorch
import torch
from torch_geometric.data ... | 36,300 | 42.63101 | 152 | py |
SubGNN | SubGNN-main/prepare_dataset/precompute_graph_metrics.py | # General
import networkx as nx
import sys
import argparse
import snap
from pathlib import Path
import numpy as np
import json
import os
import multiprocessing
# Our methods
import config_prepare_dataset as config
'''
Use this script to precompute information about the underlying base graph.
'''
def get_shortest_pa... | 2,858 | 35.189873 | 94 | py |
SubGNN | SubGNN-main/prepare_dataset/preprocess.py | # General
import numpy as np
import random
import pickle
from collections import Counter
# Pytorch
import torch
from torch_geometric.data import Data
from torch_geometric.utils import from_networkx, negative_sampling
from torch_geometric.utils.convert import to_networkx
# NetworkX
import networkx as nx
from networkx.... | 3,076 | 27.490741 | 152 | py |
B-SOID | B-SOID-master/bsoid_app.py | from streamlit import caching
from bsoid_app import data_preprocess, extract_features, clustering, machine_learner, \
export_training, video_creator, predict
from bsoid_app.bsoid_utilities import visuals
from bsoid_app.bsoid_utilities.load_css import local_css
from bsoid_app.bsoid_utilities.load_workspace import *... | 4,644 | 57.797468 | 115 | py |
B-SOID | B-SOID-master/bsoid_figs/fig3.py | import sys
import subprocess
print('\n \n \n B-SOID QUANTIFICATION \n \n \n')
path = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/workspace/l5neural5ms_.mat'
fig_format = 'png'
outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/neural_data/'
print('\n DATA FROM {} \n'.format(path))
pri... | 1,115 | 31.823529 | 117 | py |
B-SOID | B-SOID-master/bsoid_figs/fig2.py | import sys
import subprocess
import os
print('\n \n \n MODEL PERFORMANCE \n \n \n')
path = '/Volumes/Elements/B-SOID/output3/'
fig_format = 'png'
outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/model_performance/'
print('\n DATA FROM {} \n'.format(path))
print('-' * 50)
# FIG2A
vidpath = ... | 4,182 | 33.570248 | 101 | py |
B-SOID | B-SOID-master/bsoid_figs/figS6.py | import sys
import subprocess
print('\n \n \n A2A CASPASE KINEMATICS ANALYSIS \n \n \n')
path = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/workspace/a2a_loc_Rhkin.mat'
fig_format = 'png'
outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/kinematics_cdf/'
print('\n DATA FROM {} \n'.for... | 2,703 | 30.08046 | 100 | py |
B-SOID | B-SOID-master/bsoid_figs/fig5.py | import sys
import subprocess
print('\n \n \n B-SOID QUANTIFICATION \n \n \n')
path = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/workspace/MvsBvsS_zscore_mse3.mat'
fig_format = 'png'
outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/motion_energy/'
print('\n DATA FROM {} \n'.format(p... | 1,812 | 32.574074 | 117 | py |
B-SOID | B-SOID-master/bsoid_figs/github_hist.py | import sys
import subprocess
# print('\n \n \n GENERATE POSE RELATIONSHIPS HISTOGRAMS \n \n \n')
# path = '/Volumes/Elements/B-SOID/output3/'
# fig_format = 'png'
# outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/pose_relationships/'
# print('\n DATA FROM {} \n'.format(path))
# print('-' *... | 1,550 | 30.02 | 104 | py |
B-SOID | B-SOID-master/bsoid_figs/fig6_v2.py | import sys
import subprocess
print('\n \n \n KINEMATICS ANALYSIS \n \n \n')
path = '/Volumes/Elements/B-SOID/output3/'
fig_format = 'png'
outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/kinematics_py/'
print('\n DATA FROM {} \n'.format(path))
print('-' * 50)
print('\n' * 1)
print('Prepar... | 8,235 | 29.6171 | 93 | py |
B-SOID | B-SOID-master/bsoid_figs/figS4.py | import sys
import subprocess
print('\n \n \n FRAMSHIFT NEURAL DIFFERNCES \n \n \n')
path = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/workspace/neuralbehavior_durs.mat'
fig_format = 'png'
outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/neural_data/'
print('\n DATA FROM {} \n'.form... | 868 | 32.423077 | 98 | py |
B-SOID | B-SOID-master/bsoid_figs/figS2.py | import sys
import subprocess
import os
print('\n \n \n HUMAN EXERCISE UMAP \n \n \n')
path = '/Volumes/Elements/exercise_data/output/'
fig_format = 'png'
outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/human_exercise/'
print('\n DATA FROM {} \n'.format(path))
print('-' * 50)
# FIG S2
pri... | 1,947 | 32.586207 | 92 | py |
B-SOID | B-SOID-master/bsoid_figs/fig6.py | import sys
import subprocess
print('\n \n \n A2A CASPASE KINEMATICS ANALYSIS \n \n \n')
path = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/workspace/a2a_gr_Rkin.mat'
fig_format = 'png'
outpath = '/Volumes/Elements/Manuscripts/B-SOiD/bsoid_natcomm/figure_panels/kinematics_cdf/'
print('\n DATA FROM {} \n'.forma... | 3,226 | 30.637255 | 98 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/kfold_accuracy.py | import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import KFold
from operator import itemgetter
from utilities.load_data import appdata
from utilities.save_data import results
import sys, getopt
from ast import literal_eval
def generate_kfold(path, name, k):
appdata... | 2,753 | 32.585366 | 94 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/extract_kinematics.py | import numpy as np
from numpy import trapz
import os
from scipy.signal import find_peaks, resample, peak_widths
from utilities.detect_peaks import _plot
import matplotlib.pyplot as plt
from tqdm import tqdm
from utilities.load_data import appdata
from utilities.save_data import results
from utilities.statistics import ... | 11,164 | 45.327801 | 118 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/extract_images.py | import os
import ffmpeg
import cv2
from utilities.save_data import results
from utilities.processing import sort_nicely
import sys, getopt
def get_images(pathname, group_num):
try:
os.mkdir(str.join('', (pathname, '/pngs')))
except FileExistsError:
pass
try:
os.mkdir(str.join('', (... | 3,172 | 35.895349 | 116 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/trajectory_plot.py | import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
from utilities.load_data import appdata
from utilities.processing import data_processing
import sys, getopt
from ast import literal_eval
def limb_trajectory(path, name, animal_idx, bp, t_range):
appdata_ = appdata(path, name)
... | 4,750 | 33.427536 | 120 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/umap_clustering_plot.py | import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
from utilities.load_data import appdata
import sys, getopt
from matplotlib.axes._axes import _log as matplotlib_axes_logger
matplotlib_axes_logger.setLevel('ERROR')
def plot_enhanced_umap(path, name, fig_size, fig_format, outpath):... | 2,591 | 35 | 101 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/accuracy_boxplot.py | import seaborn as sns
import matplotlib.colors as mc
import colorsys
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import numpy as np
from utilities.load_data import load_sav
import sys, getopt
from ast import literal_eval
def lighten_color(color, amount=0):
# --------------------- SOURCE: @... | 3,084 | 32.172043 | 115 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/immse_cdf.py | import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import numpy as np
from utilities.load_data import load_mat
import sys, getopt
from ast import literal_eval
def plot_cdf(data, c, x_range, fig_size, fig_format, outpath):
figure(num=None, figsize=fig_size, dpi=300, facecolor='w', edgecolor='k')
... | 5,045 | 43.263158 | 118 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/kinematics_cdf_v2.py | import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import numpy as np
from utilities.load_data import load_sav
import sys, getopt
from ast import literal_eval
def plot_cdf(var, vname, data, c, x_range, bnct, tk, leg, fig_size, fig_format, outpath):
figure(num=None, figsize=fig_size, dpi=300, fac... | 6,124 | 40.385135 | 97 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/neural_plot.py | import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import numpy as np
from utilities.load_data import load_mat
import sys, getopt
from ast import literal_eval
def plot_neural_heatmap(algo, data, c, c_range, discrete_n, delim, cl, fig_size, fig_format, outpath, cb=False):
fi... | 3,251 | 34.736264 | 114 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/coherence_boxplot.py | import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import numpy as np
from utilities.load_data import load_sav
import sys, getopt
def plot_boxplot(algo, data, c, fig_size, fig_format, outpath):
figure(num=None, figsize=fig_size, dpi=300, facecolor='w', edgecolor='k')
ax... | 2,367 | 32.352113 | 121 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/pose_relationships_hist.py | import numpy as np
import itertools
from collections import OrderedDict
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
from utilities.load_data import appdata
import sys, getopt
from ast import literal_eval
def plot_pose_relationships(path, name, order, fig_size, fig_format, outpath):
appdat... | 6,572 | 49.953488 | 117 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/frameshift_coherence.py | import numpy as np
from utilities.load_data import appdata
from kfold_accuracy import reorganize_accuracy
from utilities.save_data import results
import sys, getopt
from ast import literal_eval
def generate_coherence(path, name, fps, target_fps, frame_skips, animal_index, t, order):
appdata_ = appdata(path, name)
... | 3,008 | 34.821429 | 113 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/__init__.py | 0 | 0 | 0 | py | |
B-SOID | B-SOID-master/bsoid_figs/subroutines/fsdiff_hist.py | import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import numpy as np
from utilities.load_data import load_mat
import sys, getopt
from ast import literal_eval
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import seaborn as sns
def add_subplot_axes(ax, rect, axisbg='w'):
fig = plt.g... | 5,406 | 39.350746 | 116 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/immse_heatmap.py | import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import numpy as np
from utilities.load_data import load_mat
import sys, getopt
from ast import literal_eval
def plot_heatmap(algo, data, c, c_range, discrete_n, delim, cl, fig_size, fig_format, outpath, cb=False):
figure(nu... | 3,372 | 35.268817 | 113 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/kinematics_cdf.py | import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import numpy as np
from utilities.load_data import load_mat
import sys, getopt
from ast import literal_eval
def plot_cdf(var, data, c, x_range, bn, tk, leg, fig_size, fig_format, outpath):
figure(num=None, figsize=fig_size, dpi=300, facecolor='w... | 3,747 | 36.48 | 93 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/pose_relationships_hist2.py | import numpy as np
import itertools
from collections import OrderedDict
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
from utilities.load_data import appdata
import sys, getopt
from ast import literal_eval
def plot_pose_relationships(path, name, order, fig_size, fig_format, outpath):
appdat... | 6,572 | 49.953488 | 117 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/utilities/load_json.py | import pandas as pd
import numpy as np
import glob
from tqdm import tqdm
path = '/Volumes/Elements/Drive/Data/Nahom/output/exercise2/'
POSE_BODY_25_BODY_PARTS = {0: "Nose", 1: "Neck", 2: "RShoulder", 3: "RElbow", 4: "RWrist", 5: "LShoulder", 6: "LElbow",
7: "LWrist", 8: "MidHip", 9: "RHip", ... | 1,888 | 48.710526 | 120 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/utilities/processing.py | import numpy as np
import pandas as pd
import re
def convert_int(s):
    """Return ``int(s)`` when the string is purely digits; otherwise return
    the string unchanged.

    Note: ``str.isdigit`` rejects signs and decimal points, so "-5" and
    "1.2" pass through as strings.
    """
    return int(s) if s.isdigit() else s
def alphanum_key(s):
""" Turn a string into a list of string and number chunks.
"z23a" -> ["z", 23, "a"]
"""
... | 755 | 20 | 86 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/utilities/detect_peaks.py | """Detect peaks in data based on their amplitude and other features."""
from __future__ import division, print_function
import numpy as np
__author__ = "Marcos Duarte, https://github.com/demotu/BMC"
__version__ = "1.0.4"
__license__ = "MIT"
def detect_peaks(x, mph=None, mpd=1, threshold=0, edge='rising',
... | 8,655 | 37.816143 | 103 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/utilities/statistics.py | import numpy as np
import pandas as pd
def transition_matrix(labels, n):
"""
:param labels: 1D array, predicted labels
:return df_tm: object, transition matrix data frame
"""
# n = 1 + max(labels)
tm = [[0] * n for _ in range(n)]
for (i, j) in zip(labels, labels[1:]):
tm[i][j] += 1... | 1,066 | 29.485714 | 75 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/utilities/load_data.py | import scipy.io
import os
import joblib
def load_mat(file):
return scipy.io.loadmat(file)
def load_sav(path, name, fname):
with open(os.path.join(path, str.join('', (name, '_', fname, '.sav'))), 'rb') as fr:
data = joblib.load(fr)
return [i for i in data]
class appdata:
def __init__(self,... | 2,087 | 38.396226 | 107 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/utilities/__init__.py | 0 | 0 | 0 | py | |
B-SOID | B-SOID-master/bsoid_figs/subroutines/utilities/save_data.py | import scipy.io
import os
import joblib
class results:
def __init__(self, path, name):
self.path = path
self.name = name
def save_sav(self, datalist, fname):
with open(os.path.join(self.path, str.join('', (self.name, '_', fname, '.sav'))), 'wb') as f:
joblib.dump(datalist... | 325 | 20.733333 | 101 | py |
B-SOID | B-SOID-master/bsoid_figs/subroutines/utilities/discrete_cmap.py | import numpy as np
import matplotlib.pyplot as plt
def discrete_cmap(N, base_cmap=None):
"""Create an N-bin discrete colormap from the specified input map"""
# Note that if base_cmap is a string or None, you can simply do
# return plt.cm.get_cmap(base_cmap, N)
# The following works for string, Non... | 516 | 33.466667 | 72 | py |
B-SOID | B-SOID-master/bsoid_app/extract_features.py | import itertools
import math
import os
import joblib
import numpy as np
import randfacts
import streamlit as st
import umap
from psutil import virtual_memory
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from streamlit import caching
from bsoid_app.bsoid_utilities.likelihoodpr... | 10,846 | 55.202073 | 124 | py |
B-SOID | B-SOID-master/bsoid_app/bsoid_analysis.py | import streamlit as st
from analysis_subroutines import video_analysis, machine_performance, trajectory_analysis, \
kinematics_analysis, directed_graph_analysis
from analysis_subroutines.analysis_utilities.cache_workspace import load_data
from analysis_subroutines.analysis_utilities.visuals import *
from bsoid_uti... | 3,674 | 53.044118 | 120 | py |
B-SOID | B-SOID-master/bsoid_app/clustering.py | import os
import hdbscan
import joblib
import numpy as np
import streamlit as st
import randfacts
from bsoid_app.config import *
from bsoid_app.bsoid_utilities import visuals
from bsoid_app.bsoid_utilities.load_workspace import load_clusters
from streamlit import caching
class cluster:
def __init__(self, worki... | 5,206 | 47.663551 | 120 | py |
B-SOID | B-SOID-master/bsoid_app/data_preprocess.py | import os
from datetime import date
import h5py
import joblib
import randfacts
import streamlit as st
from bsoid_app.bsoid_utilities import visuals
from bsoid_app.bsoid_utilities.likelihoodprocessing import *
from bsoid_app.bsoid_utilities.load_json import *
class preprocess:
def __init__(self):
st.sub... | 13,767 | 59.920354 | 119 | py |
B-SOID | B-SOID-master/bsoid_app/machine_learner.py | import os
import joblib
import randfacts
import streamlit as st
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split, cross_val_score
from bsoid_app.bsoid_utilities import visuals
from bsoid_app.bsoid_utilities.load_workspace import load_classifier
from streamlit im... | 4,516 | 48.637363 | 123 | py |
B-SOID | B-SOID-master/bsoid_app/__init__.py | 0 | 0 | 0 | py | |
B-SOID | B-SOID-master/bsoid_app/predict.py | import os
from datetime import date
import h5py
import joblib
import streamlit as st
from bsoid_app.bsoid_utilities import statistics
from bsoid_app.bsoid_utilities.bsoid_classification import *
from bsoid_app.bsoid_utilities.likelihoodprocessing import *
from bsoid_app.bsoid_utilities.load_json import *
class pred... | 13,196 | 58.714932 | 119 | py |
B-SOID | B-SOID-master/bsoid_app/export_training.py | import itertools
import os
import streamlit as st
from bsoid_app.bsoid_utilities.statistics import *
class export:
def __init__(self, working_dir, prefix, sampled_features, assignments, assign_prob, soft_assignments):
st.subheader('WHAT DID B-SOID LEARN?')
self.options = st.multiselect('What do... | 5,051 | 62.949367 | 116 | py |
B-SOID | B-SOID-master/bsoid_app/video_creator.py | import base64
import ffmpeg
import h5py
import streamlit as st
from bsoid_app.bsoid_utilities.bsoid_classification import *
from bsoid_app.bsoid_utilities.likelihoodprocessing import *
from bsoid_app.bsoid_utilities.load_json import *
from bsoid_app.bsoid_utilities.videoprocessing import *
@st.cache(allow_output_mu... | 14,523 | 52.007299 | 120 | py |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.