generatebio/chroma

Simple example fails on Windows 11 in Jupyter Lab

eugenio opened this issue · 5 comments

Example below

```python
from tqdm.autonotebook import tqdm
from chroma import Chroma, conditioners

chroma = Chroma()
conditioner = conditioners.SymmetryConditioner(G="C_3", num_chain_neighbors=2)
protein = chroma.sample(
    chain_lengths=[100],
    conditioner=conditioner,
    langevin_factor=8,
    inverse_temperature=8,
    sde_func="langevin",
    potts_symmetry_order=conditioner.potts_symmetry_order,
)

protein.to("sample-C3.cif")
```

fails with:

```

FileNotFoundError Traceback (most recent call last)
Cell In[4], line 4
1 from tqdm.autonotebook import tqdm
2 from chroma import Chroma, conditioners
----> 4 chroma = Chroma()
5 conditioner = conditioners.SymmetryConditioner(G="C_3", num_chain_neighbors=2)
6 protein = chroma.sample(
7 chain_lengths=[100],
8 conditioner=conditioner,
(...)
11 sde_func="langevin",
12 potts_symmetry_order=conditioner.potts_symmetry_order)

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\models\chroma.py:84, in Chroma.__init__(self, weights_backbone, weights_design, device, strict, verbose)
81 else:
82 device = "cpu"
---> 84 self.backbone_network = graph_backbone.load_model(
85 weights_backbone, device=device, strict=strict, verbose=verbose
86 ).eval()
88 self.design_network = graph_design.load_model(
89 weights_design, device=device, strict=strict, verbose=False,
90 ).eval()

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\models\graph_backbone.py:405, in load_model(weight_file, device, strict, strict_unexpected, verbose)
377 def load_model(
378 weight_file: str,
379 device: str = "cpu",
(...)
382 verbose: bool = True,
383 ) -> GraphBackbone:
384 """Load model GraphBackbone
385
386 Args:
(...)
403 model (GraphBackbone): Instance of GraphBackbone with loaded weights.
404 """
--> 405 return utility_load_model(
406 weight_file,
407 GraphBackbone,
408 device=device,
409 strict=strict,
410 strict_unexpected=strict_unexpected,
411 verbose=verbose,
412 )

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\utility\model.py:107, in load_model(weights, model_class, device, strict, strict_unexpected, verbose)
105 # load model weights
106 params = torch.load(weights, map_location="cpu")
--> 107 model = model_class(**params["init_kwargs"]).to(device)
108 missing_keys, unexpected_keys = model.load_state_dict(
109 params["model_state_dict"], strict=strict
110 )
111 if strict_unexpected and len(unexpected_keys) > 0:

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\models\graph_backbone.py:114, in GraphBackbone.__init__(self, dim_nodes, dim_edges, num_neighbors, node_features, edge_features, num_layers, dropout, node_mlp_layers, node_mlp_dim, edge_update, edge_mlp_layers, edge_mlp_dim, skip_connect_input, mlp_activation, decoder_num_hidden, graph_criterion, graph_random_min_local, backbone_update_method, backbone_update_iterations, backbone_update_num_weights, backbone_update_unconstrained, use_time_features, time_feature_type, time_log_feature_scaling, noise_schedule, noise_covariance_model, noise_beta_min, noise_beta_max, noise_log_snr_range, noise_complex_scaling, loss_scale, loss_scale_ssnr_cutoff, loss_function, checkpoint_gradients, prediction_type, num_graph_cycles, **kwargs)
111 # Encoder GNN process backbone
112 self.num_graph_cycles = args.num_graph_cycles
113 self.encoders = nn.ModuleList(
--> 114 [
115 BackboneEncoderGNN(
116 dim_nodes=args.dim_nodes,
117 dim_edges=args.dim_edges,
118 num_neighbors=args.num_neighbors,
119 node_features=args.node_features,
120 edge_features=args.edge_features,
121 num_layers=args.num_layers,
122 node_mlp_layers=args.node_mlp_layers,
123 node_mlp_dim=args.node_mlp_dim,
124 edge_update=args.edge_update,
125 edge_mlp_layers=args.edge_mlp_layers,
126 edge_mlp_dim=args.edge_mlp_dim,
127 mlp_activation=args.mlp_activation,
128 dropout=args.dropout,
129 skip_connect_input=args.skip_connect_input,
130 graph_criterion=args.graph_criterion,
131 graph_random_min_local=args.graph_random_min_local,
132 checkpoint_gradients=checkpoint_gradients,
133 )
134 for i in range(self.num_graph_cycles)
135 ]
136 )
138 self.backbone_updates = nn.ModuleList(
139 [
140 backbone.GraphBackboneUpdate(
(...)
149 ]
150 )
152 self.use_time_features = args.use_time_features

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\models\graph_backbone.py:115, in <listcomp>(.0)
111 # Encoder GNN process backbone
112 self.num_graph_cycles = args.num_graph_cycles
113 self.encoders = nn.ModuleList(
114 [
--> 115 BackboneEncoderGNN(
116 dim_nodes=args.dim_nodes,
117 dim_edges=args.dim_edges,
118 num_neighbors=args.num_neighbors,
119 node_features=args.node_features,
120 edge_features=args.edge_features,
121 num_layers=args.num_layers,
122 node_mlp_layers=args.node_mlp_layers,
123 node_mlp_dim=args.node_mlp_dim,
124 edge_update=args.edge_update,
125 edge_mlp_layers=args.edge_mlp_layers,
126 edge_mlp_dim=args.edge_mlp_dim,
127 mlp_activation=args.mlp_activation,
128 dropout=args.dropout,
129 skip_connect_input=args.skip_connect_input,
130 graph_criterion=args.graph_criterion,
131 graph_random_min_local=args.graph_random_min_local,
132 checkpoint_gradients=checkpoint_gradients,
133 )
134 for i in range(self.num_graph_cycles)
135 ]
136 )
138 self.backbone_updates = nn.ModuleList(
139 [
140 backbone.GraphBackboneUpdate(
(...)
149 ]
150 )
152 self.use_time_features = args.use_time_features

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\models\graph_design.py:1192, in BackboneEncoderGNN.__init__(self, dim_nodes, dim_edges, num_neighbors, node_features, edge_features, num_layers, node_mlp_layers, node_mlp_dim, edge_update, edge_mlp_layers, edge_mlp_dim, skip_connect_input, mlp_activation, dropout, graph_distance_atom_type, graph_cutoff, graph_mask_interfaces, graph_criterion, graph_random_min_local, checkpoint_gradients, **kwargs)
1182 self.checkpoint_gradients = checkpoint_gradients
1184 graph_kwargs = {
1185 "distance_atom_type": args.graph_distance_atom_type,
1186 "cutoff": args.graph_cutoff,
(...)
1189 "random_min_local": args.graph_random_min_local,
1190 }
-> 1192 self.feature_graph = protein_graph.ProteinFeatureGraph(
1193 dim_nodes=args.dim_nodes,
1194 dim_edges=args.dim_edges,
1195 num_neighbors=args.num_neighbors,
1196 graph_kwargs=graph_kwargs,
1197 node_features=args.node_features,
1198 edge_features=args.edge_features,
1199 )
1201 self.gnn = graph.GraphNN(
1202 dim_nodes=args.dim_nodes,
1203 dim_edges=args.dim_edges,
(...)
1215 checkpoint_gradients=checkpoint_gradients,
1216 )

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\layers\structure\protein_graph.py:182, in ProteinFeatureGraph.__init__(self, dim_nodes, dim_edges, num_neighbors, graph_kwargs, node_features, edge_features, centered, centered_pdb)
180 self.centered_pdb = centered_pdb.lower()
181 if self.centered:
--> 182 self._load_centering_params(self.centered_pdb)
184 """
185 Storing separate linear transformations for each layer, rather than concat + one
186 large linear, provides a more even weighting of the different input
(...)
191 dimensions.
192 """
193 self.node_linears = nn.ModuleList(
194 [nn.Linear(l.dim_out, self.dim_nodes) for l in self.node_layers]
195 )

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\layers\structure\protein_graph.py:276, in ProteinFeatureGraph._load_centering_params(self, reference_pdb)
274 else:
275 print(f"Computing reference stats for {reference_pdb}")
--> 276 param_dictionary = self._reference_stats(reference_pdb)
277 json_line = json.dumps(param_dictionary)
278 f.write(prefix + "\t" + json_line + "\n")

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\layers\structure\protein_graph.py:294, in ProteinFeatureGraph._reference_stats(self, reference_pdb)
293 def _reference_stats(self, reference_pdb):
--> 294 X, C, _ = Protein.from_PDBID(reference_pdb).to_XCS()
295 stats_dict = self._feature_stats(X, C)
296 return stats_dict

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\data\protein.py:234, in Protein.from_PDBID(cls, pdb_id, canonicalize, device)
231 from chroma.utility.fetchdb import RCSB_file_download
233 file_cif = f"/tmp/{pdb_id}.cif"
--> 234 RCSB_file_download(pdb_id, ".cif", file_cif)
235 protein = cls.from_CIF(file_cif, canonicalize=canonicalize, device=device)
236 unlink(file_cif)

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\utility\fetchdb.py:47, in RCSB_file_download(pdb_id, ext, local_filename)
37 """Downloads a file from the RCSB files section.
38
39 Args:
(...)
44 None
45 """
46 url = f"https://files.rcsb.org/view/{pdb_id.upper()}{ext}"
---> 47 return _download_file(url, local_filename)

File ~\OneDrive\Documenti\programmazione\chroma\lib\site-packages\chroma\utility\fetchdb.py:27, in _download_file(url, out_file)
25 with requests.get(url, stream=True) as r:
26 r.raise_for_status()
---> 27 with open(out_file, "wb") as f:
28 for chunk in r.iter_content(chunk_size=8192):
29 if chunk:

FileNotFoundError: [Errno 2] No such file or directory: '/tmp/2g3n.cif'

```
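
The last frame reduces to a plain `open()` call on a Unix-style path. As far as I can tell (my reading of the traceback, not confirmed), Windows resolves `/tmp/2g3n.cif` against the root of the current drive, e.g. `C:\tmp\2g3n.cif`, which normally does not exist:

```python
# Minimal reproduction of the failing open() in chroma.utility.fetchdb._download_file.
# On Windows this raises FileNotFoundError unless a tmp\ directory exists
# at the root of the current drive.
with open("/tmp/2g3n.cif", "wb") as f:
    f.write(b"")
```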

Hi, this is related to #13: one of the file-saving paths is hard-coded and is not compatible with the Windows filesystem. We are working on a fix; all of our testing and development was done on Linux-based systems.

In the meantime, can you try Windows Subsystem for Linux (WSL)?
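
Alternatively, as a stopgap on native Windows, the change essentially amounts to building the download target from the platform's temp directory instead of a literal `/tmp`. Here is a minimal sketch, assuming the hard-coded target in `Protein.from_PDBID` is the one that matters here; the helper name below is only illustrative, and the eventual fix may look different:

```python
import os
import tempfile

from chroma.utility.fetchdb import RCSB_file_download


def fetch_pdb_to_tempdir(pdb_id: str) -> str:
    """Download <pdb_id>.cif into the platform's temp directory.

    Sketch of a portable alternative to the hard-coded f"/tmp/{pdb_id}.cif"
    target visible in the traceback above; tempfile.gettempdir() resolves to
    a writable location on Windows, macOS, and Linux.
    """
    file_cif = os.path.join(tempfile.gettempdir(), f"{pdb_id}.cif")
    RCSB_file_download(pdb_id, ".cif", file_cif)
    return file_cif
```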

Hello! We've fixed the hard-coded path @wujiewang mentioned above. Could you please try again?

Hello @aismail3-gnr8,

I suppose these changes haven't been pushed to the PyPI package yet?

Should I try the git version?

Yes, if you don't mind doing a fresh git pull, please go ahead!

Hey @eugenio, we'll close this for now; feel free to reopen or post a new issue if you are still seeing problems.