Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
S
SENSE-PPI
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Konstantin Volzhenin
SENSE-PPI
Commits
2fe8d626
Commit
2fe8d626
authored
Jul 28, 2023
by
Konstantin Volzhenin
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
0.1.9 minor bugfix
parent
a65a0e1a
Expand all
Show whitespace changes
Inline
Side-by-side
Showing
6 changed files
with
31 additions
and
38 deletions
+31
-38
predict.py
senseppi/commands/predict.py
+5
-10
predict_string.py
senseppi/commands/predict_string.py
+0
-0
test.py
senseppi/commands/test.py
+4
-9
train.py
senseppi/commands/train.py
+7
-12
network_utils.py
senseppi/network_utils.py
+2
-7
utils.py
senseppi/utils.py
+13
-0
No files found.
senseppi/commands/predict.py
View file @
2fe8d626
...
@@ -3,7 +3,6 @@ import pytorch_lightning as pl
...
@@ -3,7 +3,6 @@ import pytorch_lightning as pl
from
itertools
import
permutations
,
product
from
itertools
import
permutations
,
product
import
numpy
as
np
import
numpy
as
np
import
pandas
as
pd
import
pandas
as
pd
import
logging
import
pathlib
import
pathlib
import
argparse
import
argparse
from
..dataset
import
PairSequenceData
from
..dataset
import
PairSequenceData
...
@@ -97,11 +96,7 @@ def main(params):
...
@@ -97,11 +96,7 @@ def main(params):
compute_embeddings
(
params
)
compute_embeddings
(
params
)
# WARNING: due to some internal issues of pytorch, the mps backend is temporarily disabled
block_mps
(
params
)
if
params
.
device
==
'mps'
:
logging
.
warning
(
'WARNING: due to some internal issues of torch, the mps backend is temporarily disabled.'
'The cpu backend will be used instead.'
)
params
.
device
=
'cpu'
logging
.
info
(
'Predicting...'
)
logging
.
info
(
'Predicting...'
)
preds
=
predict
(
params
)
preds
=
predict
(
params
)
...
@@ -116,8 +111,8 @@ def main(params):
...
@@ -116,8 +111,8 @@ def main(params):
if __name__ == '__main__':
    # Script entry point: build the prediction CLI, parse, and dispatch.
    cli_parser = add_args(argparse.ArgumentParser())
    cli_args = cli_parser.parse_args()
    main(cli_args)
senseppi/commands/predict_string.py
View file @
2fe8d626
This diff is collapsed.
Click to expand it.
senseppi/commands/test.py
View file @
2fe8d626
from
torch.utils.data
import
DataLoader
from
torch.utils.data
import
DataLoader
import
pytorch_lightning
as
pl
import
pytorch_lightning
as
pl
import
pandas
as
pd
import
pandas
as
pd
import
logging
import
pathlib
import
pathlib
import
argparse
import
argparse
from
..dataset
import
PairSequenceData
from
..dataset
import
PairSequenceData
...
@@ -72,11 +71,7 @@ def main(params):
...
@@ -72,11 +71,7 @@ def main(params):
compute_embeddings
(
params
)
compute_embeddings
(
params
)
# WARNING: due to some internal issues of pytorch, the mps backend is temporarily disabled
block_mps
(
params
)
if
params
.
device
==
'mps'
:
logging
.
warning
(
'WARNING: due to some internal issues of torch, the mps backend is temporarily disabled.'
'The cpu backend will be used instead.'
)
params
.
device
=
'cpu'
logging
.
info
(
'Evaluating...'
)
logging
.
info
(
'Evaluating...'
)
test_metrics
=
test
(
params
)[
0
]
test_metrics
=
test
(
params
)[
0
]
...
@@ -87,7 +82,7 @@ def main(params):
...
@@ -87,7 +82,7 @@ def main(params):
if __name__ == '__main__':
    # Script entry point: assemble the evaluation CLI and run main().
    arg_parser = argparse.ArgumentParser()
    arg_parser = add_args(arg_parser)
    main(arg_parser.parse_args())
senseppi/commands/train.py
View file @
2fe8d626
...
@@ -2,8 +2,7 @@ import pytorch_lightning as pl
...
@@ -2,8 +2,7 @@ import pytorch_lightning as pl
from
pytorch_lightning.callbacks
import
ModelCheckpoint
from
pytorch_lightning.callbacks
import
ModelCheckpoint
import
pathlib
import
pathlib
import
argparse
import
argparse
import
logging
from
..utils
import
*
from
..utils
import
add_general_args
from
..model
import
SensePPIModel
from
..model
import
SensePPIModel
from
..dataset
import
PairSequenceData
from
..dataset
import
PairSequenceData
from
..esm2_model
import
add_esm_args
,
compute_embeddings
from
..esm2_model
import
add_esm_args
,
compute_embeddings
...
@@ -15,11 +14,7 @@ def main(params):
...
@@ -15,11 +14,7 @@ def main(params):
compute_embeddings
(
params
)
compute_embeddings
(
params
)
# WARNING: due to some internal issues of pytorch, the mps backend is temporarily disabled
block_mps
(
params
)
if
params
.
device
==
'mps'
:
logging
.
warning
(
'WARNING: due to some internal issues of torch, the mps backend is temporarily disabled.'
'The cpu backend will be used instead.'
)
params
.
device
=
'cpu'
dataset
=
PairSequenceData
(
emb_dir
=
params
.
output_dir_esm
,
actions_file
=
params
.
pairs_file
,
dataset
=
PairSequenceData
(
emb_dir
=
params
.
output_dir_esm
,
actions_file
=
params
.
pairs_file
,
max_len
=
params
.
max_len
,
labels
=
True
)
max_len
=
params
.
max_len
,
labels
=
True
)
...
@@ -80,8 +75,8 @@ def add_args(parser):
...
@@ -80,8 +75,8 @@ def add_args(parser):
if __name__ == '__main__':
    # Script entry point: parse training arguments and launch training.
    parsed = add_args(argparse.ArgumentParser()).parse_args()
    main(parsed)
\ No newline at end of file
\ No newline at end of file
senseppi/network_utils.py
View file @
2fe8d626
...
@@ -15,18 +15,13 @@ import shutil
...
@@ -15,18 +15,13 @@ import shutil
DOWNLOAD_LINK_STRING
=
"https://stringdb-downloads.org/download/"
DOWNLOAD_LINK_STRING
=
"https://stringdb-downloads.org/download/"
def
generate_pairs_string
(
fasta_file
,
output_file
,
with_self
=
False
,
delete_proteins
=
None
):
def
generate_pairs_string
(
fasta_file
,
output_file
,
delete_proteins
=
None
):
ids
=
[]
ids
=
[]
for
record
in
SeqIO
.
parse
(
fasta_file
,
"fasta"
):
for
record
in
SeqIO
.
parse
(
fasta_file
,
"fasta"
):
ids
.
append
(
record
.
id
)
ids
.
append
(
record
.
id
)
if
with_self
:
all_pairs
=
[
p
for
p
in
product
(
ids
,
repeat
=
2
)]
else
:
all_pairs
=
[
p
for
p
in
permutations
(
ids
,
2
)]
pairs
=
[]
pairs
=
[]
for
p
in
all_pairs
:
for
p
in
[
p
for
p
in
permutations
(
ids
,
2
)]
:
if
(
p
[
1
],
p
[
0
])
not
in
pairs
and
(
p
[
0
],
p
[
1
])
not
in
pairs
:
if
(
p
[
1
],
p
[
0
])
not
in
pairs
and
(
p
[
0
],
p
[
1
])
not
in
pairs
:
pairs
.
append
(
p
)
pairs
.
append
(
p
)
...
...
senseppi/utils.py
View file @
2fe8d626
...
@@ -2,6 +2,7 @@ from Bio import SeqIO
...
@@ -2,6 +2,7 @@ from Bio import SeqIO
import
os
import
os
from
senseppi
import
__version__
from
senseppi
import
__version__
import
torch
import
torch
import
logging
def
add_general_args
(
parser
):
def
add_general_args
(
parser
):
...
@@ -29,6 +30,18 @@ def determine_device():
...
@@ -29,6 +30,18 @@ def determine_device():
return
device
return
device
def block_mps(params):
    """Work around the temporarily broken torch MPS backend.

    If ``params`` carries a ``device`` attribute set to ``'mps'``, emit a
    warning and replace it with ``'gpu'`` when CUDA is available, otherwise
    ``'cpu'``.  Objects without a ``device`` attribute, or with any other
    device value, are left untouched.

    :param params: parsed argument namespace (mutated in place)
    """
    # WARNING: due to some internal issues of pytorch, the mps backend is temporarily disabled
    # Guard clauses: nothing to do unless a 'mps' device is actually requested.
    if not hasattr(params, 'device') or params.device != 'mps':
        return
    # Fix: the original adjacent string literals concatenated without a
    # separating space ("...disabled.The cpu backend..."); add the space.
    logging.warning('WARNING: due to some internal issues of torch, the mps backend is temporarily disabled. '
                    'The cpu backend will be used instead.')
    # Fall back to CUDA ('gpu') when present, else plain CPU.
    params.device = 'gpu' if torch.cuda.is_available() else 'cpu'
def
process_string_fasta
(
fasta_file
,
min_len
,
max_len
):
def
process_string_fasta
(
fasta_file
,
min_len
,
max_len
):
with
open
(
'file.tmp'
,
'w'
)
as
f
:
with
open
(
'file.tmp'
,
'w'
)
as
f
:
for
record
in
SeqIO
.
parse
(
fasta_file
,
"fasta"
):
for
record
in
SeqIO
.
parse
(
fasta_file
,
"fasta"
):
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment