{
  "nbformat": 4,
  "nbformat_minor": 2,
  "metadata": {
    "save_output": true,
    "synapse_widget": { "version": "0.1", "state": {} },
    "kernelspec": { "name": "synapse_pyspark", "display_name": "python" },
    "language_info": { "name": "python" }
  },
  "cells": [
    {
      "execution_count": null,
      "cell_type": "code",
      "metadata": {},
      "source": [
        "# Configuration: ADLS Gen2 location of the input data.\n",
        "# NOTE(review): the global `spark` session is provided by the Synapse PySpark\n",
        "# runtime, so no SparkSession import/construction is needed here.\n",
        "account_name = \"devfiles001\"\n",
        "container_name = \"testfolder\"\n",
        "relative_path = \"\"  # path inside the container; empty string means container root\n",
        "\n",
        "# abfss://<container>@<account>.dfs.core.windows.net/<path>\n",
        "# rstrip('/') so that joining with '/<file>' below never yields a double slash\n",
        "# (the old '%s' version produced '...net//Book1.csv' when relative_path was empty).\n",
        "adls_path = f\"abfss://{container_name}@{account_name}.dfs.core.windows.net/{relative_path}\".rstrip(\"/\")"
      ],
      "outputs": []
    },
    {
      "execution_count": null,
      "cell_type": "code",
      "metadata": {
        "jupyter": { "source_hidden": false, "outputs_hidden": false },
        "nteract": { "transient": { "deleting": false } }
      },
      "source": [
        "# Load Book1.csv from ADLS. 'header' promotes the first row to column names;\n",
        "# all columns are read as strings (no schema / inferSchema supplied).\n",
        "df1 = spark.read.option('header', 'true') \\\n",
        "    .option('delimiter', ',') \\\n",
        "    .csv(adls_path + '/Book1.csv')"
      ],
      "outputs": []
    }
  ]
}