[PYTHON] VS Code snippets for data analysts

Introduction

When I write code day to day, I end up typing or searching for the same things over and over. Registering those as snippets lets you enter them with much less effort, so coding gets faster. In this post, I introduce snippets that are useful for data analysis.

How to set up

See the following articles for how to register snippets in VS Code.

- VsCode snippet recommendation
- Enjoy with user-defined snippets in Visual Studio Code
- Define snippets in Visual Studio Code (with features that may not be familiar to beginners)

Contents

I have created snippets for the following libraries and modules: LightGBM, NumPy, pandas, Matplotlib, seaborn, Joblib, scikit-learn, and the standard logging module.

snippets/python.json

{
    "lgb": {
        "prefix": [
            "lgb",
            "import lightgbm as lgb"
        ],
        "body": "import lightgbm as lgb",
        "description": "Import LightGBM"
    },
    "np": {
        "prefix": [
            "np",
            "import numpy as np"
        ],
        "body": "import numpy as np",
        "description": "Import Numpy"
    },
    "pd": {
        "prefix": [
            "pd",
            "import pandas as pd"
        ],
        "body": "import pandas as pd",
        "description": "Import Pandas"
    },
    "plt": {
        "prefix": [
            "plt",
            "import matplotlib.pyplot as plt",
            "from matplotlib import ..."
        ],
        "body": "from matplotlib import pyplot as plt",
        "description": "Import Matplotlib"
    },
    "sns": {
        "prefix": [
            "sns",
            "import seaborn as sns"
        ],
        "body": "import seaborn as sns",
        "description": "Import seaborn"
    },
    "joblib.dump": {
        "prefix": [
            "joblib.dump",
            "from joblib import dump"
        ],
        "body": "from joblib import dump",
        "description": "Import `dump` in Joblib"
    },
    "joblib.load": {
        "prefix": [
            "joblib.load",
            "from joblib import load"
        ],
        "body": "from joblib import load",
        "description": "Import `load` in Joblib"
    },
    "sklearn.compose.make_column_transformer": {
        "prefix": [
            "sklearn.compose.make_column_transformer",
            "from sklearn.compose import ..."
        ],
        "body": "from sklearn.compose import make_column_transformer",
        "description": "Import `make_column_transformer` in scikit-learn"
    },
    "sklearn.datasets.load_*": {
        "prefix": [
            "sklearn.datasets.load_*",
            "from sklearn.datasets import ..."
        ],
        "body": "from sklearn.datasets import ${1:load_iris}",
        "description": "Import a function that loads a dataset"
    },
    "sklearn.pipeline.make_pipeline": {
        "prefix": [
            "sklearn.pipeline.make_pipeline",
            "from sklearn.pipeline import ..."
        ],
        "body": "from sklearn.pipeline import make_pipeline",
        "description": "Import `make_pipeline` in scikit-learn"
    },
    "logger = ...": {
        "prefix": "logger = ...",
        "body": "logger = logging.getLogger(${1:__name__})",
        "description": "Get a logger"
    },
    "dtrain = ...": {
        "prefix": "dtrain = ...",
        "body": "dtrain = lgb.Dataset(${1:X}, label=${2:y})",
        "description": "Create a LightGBM dataset instance"
    },
    "booster = ...": {
        "prefix": "booster = ...",
        "body": [
            "booster = lgb.train(",
            "\t${1:params},",
            "\t${2:dtrain},",
            "\t${3:# **kwargs}",
            ")"
        ],
        "description": "Train a LightGBM booster"
    },
    "ax = ...": {
        "prefix": "ax = ...",
        "body": [
            "ax = lgb.plot_importance(",
            "\t${1:booster},",
            "\t${2:# **kwargs}",
            ")"
        ],
        "description": "Plot feature importances"
    },
    "f, ax = ...": {
        "prefix": "f, ax = ...",
        "body": "f, ax = plt.subplots(figsize=${1:(8, 6)})",
        "description": "Create a figure and a set of subplots"
    },
    "df = ...": {
        "prefix": "df = ...",
        "body": [
            "df = pd.read_csv(",
            "\t${1:filepath_or_buffer},",
            "\t${2:# **kwargs}",
            ")"
        ],
        "description": "Read a csv file into a Pandas dataFrame"
    },
    "description = ...": {
        "prefix": "description = ...",
        "body": "description = ${1:df}.describe(include=${2:\"all\"})",
        "description": "Create a Pandas dataframe description"
    },
    "with pd.option_context(...": {
        "prefix": "with pd.option_context(...",
        "body": [
            "with.pd.option_context(",
            "\t\"display.max_rows\",",
            "\t${1:None},",
            "\t\"display.max_columns\",",
            "\t${2:None},",
            "):",
            "\tdisplay(${3:pass})"
        ],
        "description": "Set temporarily Pandas options"
    },
    "X, y = ...": {
        "prefix": "X, y = ...",
        "body": "X, y = ${1:load_iris}(return_X_y=True)",
        "description": "Load and return the dataset"
    },
    "X_train, X_test, ...": {
        "prefix": "X_train, X_test, ...",
        "body": [
            "X_train, X_test, y_train, y_test = train_test_split(",
            "\tX,",
            "\ty,",
            "\trandom_state=${1:0},",
            "\tshuffle=${2:True},",
            ")"
        ],
        "description": "Split arrays into train and test subsets"
    },
    "estimator = BaseEstimator(...": {
        "prefix": "estimator = BaseEstimator(...",
        "body": [
            "estimator = ${1:BaseEstimator}(",
            "\t${2:# **params}",
            ")"
        ],
        "description": "Create an scikit-learn estimator instance"
    },
    "estimator = make_pipeline(...": {
        "prefix": "estimator = make_pipeline(...",
        "body": [
            "estimator = make_pipeline(",
            "\t${1:estimator},",
            "\t${2:# *steps}",
            ")"
        ],
        "description": "Create a scikit-learn pipeline instance"
    },
    "estimator = make_column_transformer(...": {
        "prefix": "estimator = make_column_transformer(...",
        "body": [
            "estimator = make_column_transformer(",
            "\t(${1:estimator}, ${2:columns}),",
            "\t${3:# *transformers}",
            ")"
        ],
        "description": "Create a scikit-learn column transformer instance"
    },
    "estimator.fit(...": {
        "prefix": "estimator.fit(...",
        "body": [
            "${1:estimator}.fit(",
            "\t${2:X},",
            "\ty=${3:y},",
            "\t${4:# **fit_params}",
            ")"
        ],
        "description": "Fit the estimator according to the given training data"
    },
    "dump(...": {
        "prefix": "dump(...",
        "body": "dump(${1:estimator}, ${2:filename}, compress=${3:0})",
        "description": "Save the estimator"
    },
    "estimator = load(...": {
        "prefix": "estimator = load(...",
        "body": "estimator = load(${1:filename})",
        "description": "Load the estimator"
    },
    "y_pred = ...": {
        "prefix": "y_pred = ...",
        "body": "y_pred = ${1:estimator}.predict(${2:X})",
        "description": "Predict using the fitted model"
    },
    "X = ...": {
        "prefix": "X = ...",
        "body": "X = ${1:estimator}.transform(${2:X})",
        "description": "Transform the data"
    }
}
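
To give a feel for how these snippets fit together, here is a minimal sketch of the kind of script they expand into. It assumes the iris toy dataset and a LightGBM multiclass model; the params values, the booster.joblib filename, and the train_test_split import (which has no import snippet above) are illustrative choices rather than part of the snippet definitions.

import lightgbm as lgb
from joblib import dump, load
from matplotlib import pyplot as plt
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

# "X, y = ..." snippet: load a toy dataset.
X, y = load_iris(return_X_y=True)

# "X_train, X_test, ..." snippet: split into train and test subsets.
X_train, X_test, y_train, y_test = train_test_split(
    X,
    y,
    random_state=0,
    shuffle=True,
)

# "dtrain = ..." and "booster = ..." snippets: build a LightGBM dataset and train a booster.
dtrain = lgb.Dataset(X_train, label=y_train)
params = {"objective": "multiclass", "num_class": 3, "verbosity": -1}  # illustrative parameters
booster = lgb.train(params, dtrain, num_boost_round=10)

# "ax = ..." snippet: plot feature importances.
ax = lgb.plot_importance(booster)
plt.show()

# "dump(..." and "estimator = load(..." snippets: save and reload the model with Joblib.
dump(booster, "booster.joblib", compress=0)
booster = load("booster.joblib")

# "y_pred = ..." snippet: predict on the test subset.
y_pred = booster.predict(X_test)

Each block above is just a prefix plus a Tab away, with the tab stops filled in.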

Conclusion

If I come up with new snippets, I'll update this post from time to time.
