# Set-Up
## Set-Up System
### Check RAM Usage (High or Low)
#@title Check High RAM using or not ?
# Report the total system RAM in gigabytes and say whether this runtime
# qualifies as high-RAM (threshold: 20 GB, matching Colab's high-RAM tier).
from psutil import virtual_memory

total_gb = virtual_memory().total / 1e9
print('Your runtime has {:.1f} gigabytes of available RAM\n'.format(total_gb))

# Pick the verdict message via a conditional expression instead of if/else.
verdict = (
    'You are using a high-RAM runtime!'
    if total_gb >= 20
    else 'Not using a high-RAM runtime'
)
print(verdict)
Your runtime has 27.3 gigabytes of available RAM
You are using a high-RAM runtime!
Note: as shown above, this runtime is using high RAM.
Check GPU is Running or Not#
#@title Check GPU
!nvidia-smi -L
GPU 0: Tesla T4 (UUID: GPU-2c26e6d7-ad51-9f50-5a39-56f5b83453c7)
!nvidia-smi
Sun May 28 04:27:33 2023
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 525.85.12 Driver Version: 525.85.12 CUDA Version: 12.0 |
|-------------------------------+----------------------+----------------------+
| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
| | | MIG M. |
|===============================+======================+======================|
| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |
| N/A 33C P8 9W / 70W | 0MiB / 15360MiB | 0% Default |
| | | N/A |
+-------------------------------+----------------------+----------------------+
+-----------------------------------------------------------------------------+
| Processes: |
| GPU GI CI PID Type Process name GPU Memory |
| ID ID Usage |
|=============================================================================|
| No running processes found |
+-----------------------------------------------------------------------------+
#@title Check CPU
!lscpu
Architecture: x86_64
CPU op-mode(s): 32-bit, 64-bit
Byte Order: Little Endian
Address sizes: 46 bits physical, 48 bits virtual
CPU(s): 4
On-line CPU(s) list: 0-3
Thread(s) per core: 2
Core(s) per socket: 2
Socket(s): 1
NUMA node(s): 1
Vendor ID: GenuineIntel
CPU family: 6
Model: 85
Model name: Intel(R) Xeon(R) CPU @ 2.00GHz
Stepping: 3
CPU MHz: 2000.186
BogoMIPS: 4000.37
Hypervisor vendor: KVM
Virtualization type: full
L1d cache: 64 KiB
L1i cache: 64 KiB
L2 cache: 2 MiB
L3 cache: 38.5 MiB
NUMA node0 CPU(s): 0-3
Vulnerability Itlb multihit: Not affected
Vulnerability L1tf: Mitigation; PTE Inversion
Vulnerability Mds: Vulnerable; SMT Host state unknown
Vulnerability Meltdown: Vulnerable
Vulnerability Mmio stale data: Vulnerable
Vulnerability Retbleed: Vulnerable
Vulnerability Spec store bypass: Vulnerable
Vulnerability Spectre v1: Vulnerable: __user pointer sanitization and use
rcopy barriers only; no swapgs barriers
Vulnerability Spectre v2: Vulnerable, IBPB: disabled, STIBP: disabled, PB
RSB-eIBRS: Not affected
Vulnerability Srbds: Not affected
Vulnerability Tsx async abort: Vulnerable
Flags: fpu vme de pse tsc msr pae mce cx8 apic sep mtr
r pge mca cmov pat pse36 clflush mmx fxsr sse s
se2 ss ht syscall nx pdpe1gb rdtscp lm constant
_tsc rep_good nopl xtopology nonstop_tsc cpuid
tsc_known_freq pni pclmulqdq ssse3 fma cx16 pci
d sse4_1 sse4_2 x2apic movbe popcnt aes xsave a
vx f16c rdrand hypervisor lahf_lm abm 3dnowpref
etch invpcid_single ssbd ibrs ibpb stibp fsgsba
se tsc_adjust bmi1 hle avx2 smep bmi2 erms invp
cid rtm mpx avx512f avx512dq rdseed adx smap cl
flushopt clwb avx512cd avx512bw avx512vl xsaveo
pt xsavec xgetbv1 xsaves arat md_clear arch_cap
abilities
!lscpu | grep 'Thread(s) per core:'
Thread(s) per core: 2
#@title check CUDA version
!nvcc --version
nvcc: NVIDIA (R) Cuda compiler driver
Copyright (c) 2005-2022 NVIDIA Corporation
Built on Wed_Sep_21_10:33:58_PDT_2022
Cuda compilation tools, release 11.8, V11.8.89
Build cuda_11.8.r11.8/compiler.31833905_0
# Show the numba version installed in this runtime (displayed as cell output).
import numba
numba.__version__
'0.56.4'
#@title Downgrade rpy2
# -------- The latest rpy2 release doesn't support installing packages in this
# environment, so we pin an older version.
# NOTE(review): the cell title and the commented command pin 3.5.1, but the
# active command installs 3.4.2; the pip log further down reports that
# anndata2ri requires rpy2>=3.4.3, so this pin creates a dependency conflict —
# verify which version is actually intended.
# !pip install rpy2==3.5.1
!pip install -Iv rpy2==3.4.2
Using pip 23.1.2 from /usr/local/lib/python3.10/dist-packages/pip (python 3.10)
Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/
Collecting rpy2==3.4.2
Using cached rpy2-3.4.2-cp310-cp310-linux_x86_64.whl
Collecting cffi>=1.10.0 (from rpy2==3.4.2)
Using cached cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (441 kB)
Collecting jinja2 (from rpy2==3.4.2)
Using cached Jinja2-3.1.2-py3-none-any.whl (133 kB)
Collecting pytz (from rpy2==3.4.2)
Using cached pytz-2023.3-py2.py3-none-any.whl (502 kB)
Collecting tzlocal (from rpy2==3.4.2)
Using cached tzlocal-5.0.1-py3-none-any.whl (20 kB)
Collecting pycparser (from cffi>=1.10.0->rpy2==3.4.2)
Using cached pycparser-2.21-py2.py3-none-any.whl (118 kB)
Collecting MarkupSafe>=2.0 (from jinja2->rpy2==3.4.2)
Using cached MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (25 kB)
Installing collected packages: pytz, tzlocal, pycparser, MarkupSafe, jinja2, cffi, rpy2
ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.
anndata2ri 1.1 requires rpy2>=3.4.3, but you have rpy2 3.4.2 which is incompatible.
Successfully installed MarkupSafe-2.1.2 cffi-1.15.1 jinja2-3.1.2 pycparser-2.21 pytz-2022.7.1 rpy2-3.4.2 tzlocal-4.3
#@title Use Magic command to Run R in colab :
# Load the rpy2 IPython extension so %%R cells can run R code in this notebook.
# %load_ext rpy2.ipython
%load_ext rpy2.ipython
Set-Up Working Directory#
#@title Working Directory :
# Path /content/drive/MyDrive/Single_Cell_RNAseq
import os
# Setting our working directory to a folder in our Google Drive. This way, if our notebook times out,
# our files will be saved in your Google Drive!
# the base Google Drive directory
root_dir = "/content/drive/My Drive/" #@param {type:"raw"}
# choose where we want our project files to be saved
project_folder = "scRNA_using_Python/" #@param {type:"raw"}
def create_and_set_working_directory(project_folder):
# check if our project folder exists. if not, it will be created.
if os.path.isdir(root_dir + project_folder) == False:
os.mkdir(root_dir + project_folder)
print(root_dir + project_folder + ' did not exist but was created.')
# change the OS to use our project folder as the working directory
os.chdir(root_dir + project_folder)
# create a test file to make sure it shows up in the right place
!touch 'new_file_in_working_directory.txt'
print('\n our working directory was changed to ' + root_dir + project_folder + \
"\n\n An empty text file was created here. We can also run !pwd to confirm the current working directory." )
create_and_set_working_directory(project_folder)
our working directory was changed to /content/drive/My Drive/scRNA_using_Python/
An empty text file was created here. We can also run !pwd to confirm the current working directory.
#@title Check Working DIrectory :
!pwd
/content/drive/My Drive/scRNA_using_Python
### Install Python Libraries
#@title Install Libraries :
# NOTE(review): overriding getpreferredencoding presumably works around a
# Colab locale issue that breaks some pip installs — confirm this is still
# needed on current runtimes.
import locale
locale.getpreferredencoding = lambda: "UTF-8"
# Single-cell analysis stack: scvi-tools/scanpy/scvelo plus annotation and
# ambient-RNA / doublet helpers.
!pip install scvi-tools
!pip install scikit-misc
!pip install leidenalg
!pip install celltypist
!pip install scanpy
!pip install muon
!pip install -U scvelo
!pip install dropkick
!pip install pybiomart
!pip install anndata2ri
!pip install diopy
# scAR is installed straight from its GitHub repository (no version pinned).
%pip install git+https://github.com/Novartis/scAR.git
!pip install louvain
Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/
Collecting louvain
Downloading louvain-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.1 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.1/1.1 MB 38.6 MB/s eta 0:00:00
?25hRequirement already satisfied: igraph<0.11,>=0.10.0 in /usr/local/lib/python3.10/dist-packages (from louvain) (0.10.4)
Requirement already satisfied: texttable>=1.6.2 in /usr/local/lib/python3.10/dist-packages (from igraph<0.11,>=0.10.0->louvain) (1.6.7)
Installing collected packages: louvain
Successfully installed louvain-0.8.0
!pip install diffxpy
!pip install pipreqs
Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/
Collecting pipreqs
Downloading pipreqs-0.4.13-py2.py3-none-any.whl (33 kB)
Collecting docopt (from pipreqs)
Downloading docopt-0.6.2.tar.gz (25 kB)
Preparing metadata (setup.py) ... ?25l?25hdone
Collecting yarg (from pipreqs)
Downloading yarg-0.1.9-py2.py3-none-any.whl (19 kB)
Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from yarg->pipreqs) (2.27.1)
Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->yarg->pipreqs) (1.26.15)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->yarg->pipreqs) (2022.12.7)
Requirement already satisfied: charset-normalizer~=2.0.0 in /usr/local/lib/python3.10/dist-packages (from requests->yarg->pipreqs) (2.0.12)
Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->yarg->pipreqs) (3.4)
Building wheels for collected packages: docopt
Building wheel for docopt (setup.py) ... ?25l?25hdone
Created wheel for docopt: filename=docopt-0.6.2-py2.py3-none-any.whl size=13707 sha256=d63b1628fa2b1b164194d2d5a0ff369ea09985aff2528d4d14dba07a261831d5
Stored in directory: /root/.cache/pip/wheels/fc/ab/d4/5da2067ac95b36618c629a5f93f809425700506f72c9732fac
Successfully built docopt
Installing collected packages: docopt, yarg, pipreqs
Successfully installed docopt-0.6.2 pipreqs-0.4.13 yarg-0.1.9
!pip install bbknn
Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/
Collecting bbknn
Downloading bbknn-1.5.1-py3-none-any.whl (11 kB)
Requirement already satisfied: Cython in /usr/local/lib/python3.10/dist-packages (from bbknn) (0.29.34)
Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from bbknn) (1.22.4)
Requirement already satisfied: scipy in /usr/local/lib/python3.10/dist-packages (from bbknn) (1.10.1)
Requirement already satisfied: annoy in /usr/local/lib/python3.10/dist-packages (from bbknn) (1.17.2)
Requirement already satisfied: pynndescent in /usr/local/lib/python3.10/dist-packages (from bbknn) (0.5.10)
Requirement already satisfied: umap-learn in /usr/local/lib/python3.10/dist-packages (from bbknn) (0.5.3)
Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (from bbknn) (1.2.2)
Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from bbknn) (23.1)
Requirement already satisfied: numba>=0.51.2 in /usr/local/lib/python3.10/dist-packages (from pynndescent->bbknn) (0.56.4)
Requirement already satisfied: llvmlite>=0.30 in /usr/local/lib/python3.10/dist-packages (from pynndescent->bbknn) (0.39.1)
Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.10/dist-packages (from pynndescent->bbknn) (1.2.0)
Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->bbknn) (3.1.0)
Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from umap-learn->bbknn) (4.65.0)
Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from numba>=0.51.2->pynndescent->bbknn) (67.7.2)
Installing collected packages: bbknn
Successfully installed bbknn-1.5.1
!pip install scanorama
Install R Libraries :#
%%R
# Install the R 'anndata' package from CRAN.
install.packages("anndata")
#@title Install Bioconductor Packages :
%%R
# Bioconductor packages to install.
biopckgs = c("scry")
# Bootstrap BiocManager if it is not already available, then install the list.
if (!require("BiocManager", quietly = TRUE))
install.packages("BiocManager")
BiocManager::install(biopckgs)
!sudo apt-get install tree
Load Libraries :#
#@title Load Libraries :
# Core scientific Python / plotting stack.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import rcParams
from matplotlib.pyplot import rc_context
from matplotlib import colors
import seaborn as sns
# Single-cell analysis libraries.
import scanpy as sc
import scipy as sp
from scipy.stats import median_abs_deviation
from scipy.sparse import csr_matrix, issparse
from scar import model, setup_anndata
import anndata as ad
import scvelo as scv
import scvi
import dropkick as dk
import scanorama
import bbknn
# R interoperability (rpy2 / anndata2ri).
import anndata2ri
import rpy2.rinterface_lib.callbacks as rcb
import rpy2.robjects as ro
import celltypist
from celltypist import models
import warnings
# Silence all warnings for cleaner notebook output.
warnings.simplefilter("ignore")
scv.settings.verbosity = 3 # show errors(0), warnings(1), info(2), hints(3)
scv.settings.presenter_view = True # set max width size for presenter view
#scv.set_figure_params('scvelo') # for beautified visualization
sc.settings.verbosity = 0
sc.settings.set_figure_params(
dpi=80,
facecolor="white",
frameon=False,
)
#rcb.logger.setLevel(logging.ERROR)
# Enable pandas<->R conversion and AnnData<->SingleCellExperiment conversion,
# then (re)load the %%R cell magic.
ro.pandas2ri.activate()
anndata2ri.activate()
%load_ext rpy2.ipython
INFO:lightning_fabric.utilities.seed:Global seed set to 0
#@title Load Libraries :
# NOTE(review): this cell largely duplicates the "Load Libraries" cell earlier
# in the notebook. It differs only in settings (scanpy verbosity 3 instead of
# 0, adds color_map="viridis", enables scv.set_figure_params) and omits the
# R-interop imports (anndata2ri / rpy2). Running both is redundant; whichever
# cell runs last determines the effective settings — consider removing one.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import rcParams
from matplotlib.pyplot import rc_context
from matplotlib import colors
import seaborn as sns
import scanpy as sc
import scipy as sp
from scipy.stats import median_abs_deviation
from scipy.sparse import csr_matrix, issparse
from scar import model, setup_anndata
import anndata as ad
import scvelo as scv
import scvi
import dropkick as dk
import celltypist
from celltypist import models
import warnings
warnings.simplefilter("ignore")
sc.settings.verbosity = 3
sc.settings.set_figure_params(
dpi=80,
facecolor="white",
frameon=False,
color_map="viridis",
)
scv.settings.verbosity = 3 # show errors(0), warnings(1), info(2), hints(3)
scv.settings.presenter_view = True # set max width size for presenter view
scv.set_figure_params('scvelo') # for beautified visualization
INFO:lightning_fabric.utilities.seed:Global seed set to 0
# Differential expression testing API.
import diffxpy.api as de
#@title Create requirement file
# pipreqs scans the working directory and writes a requirements.txt listing
# only the packages actually imported (it refuses to overwrite an existing
# file unless --force is given).
!pipreqs .
WARNING: requirements.txt already exists, use --force to overwrite it
!pip freeze > requirementsscRNA.txt