update

355 tunmnlu/task_3/Skeleton/Q1/_submit/q1b/q1.ipynb Normal file

@@ -0,0 +1,355 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "e5905a69",
   "metadata": {},
   "source": [
    "# CSE6242 - HW3 - Q1"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "09289981",
   "metadata": {},
   "source": [
    "PySpark Imports"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "139318cb",
   "metadata": {},
   "outputs": [],
   "source": [
    "### DO NOT MODIFY THIS CELL ###\n",
    "import pyspark\n",
    "from pyspark.sql import SQLContext\n",
    "from pyspark.sql.functions import hour, when, col, date_format, to_timestamp, round, coalesce\n",
    "from pyspark.sql.functions import *"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "3fd9e0f8",
   "metadata": {},
   "source": [
    "Initialize PySpark Context"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "b0c18c6c",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Setting default log level to \"WARN\".\n",
      "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n",
      "23/10/18 14:58:22 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n",
      "23/10/18 14:58:22 WARN Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.\n",
      "23/10/18 14:58:22 WARN Utils: Service 'SparkUI' could not bind on port 4041. Attempting port 4042.\n",
      "23/10/18 14:58:22 WARN Utils: Service 'SparkUI' could not bind on port 4042. Attempting port 4043.\n",
      "/usr/local/lib/python3.9/dist-packages/pyspark/sql/context.py:113: FutureWarning: Deprecated in 3.0.0. Use SparkSession.builder.getOrCreate() instead.\n",
      "  warnings.warn(\n"
     ]
    }
   ],
   "source": [
    "### DO NOT MODIFY THIS CELL ###\n",
    "sc = pyspark.SparkContext(appName=\"HW3-Q1\")\n",
    "sqlContext = SQLContext(sc)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d68ae314",
   "metadata": {},
   "source": [
    "Define function for loading data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "7e5bbdda",
   "metadata": {},
   "outputs": [],
   "source": [
    "### DO NOT MODIFY THIS CELL ###\n",
    "def load_data():\n",
    "    df = sqlContext.read.option(\"header\",True) \\\n",
    "        .csv(\"yellow_tripdata_2019-01_short.csv\")\n",
    "    return df"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "0d52409d",
   "metadata": {},
   "source": [
    "### Q1.a"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "e43f6e00",
   "metadata": {},
   "source": [
    "Perform data casting to clean incoming dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "11f801b4",
   "metadata": {},
   "outputs": [],
   "source": [
    "def clean_data(df):\n",
    "    '''\n",
    "    input: df a dataframe\n",
    "    output: df a dataframe with all the original columns\n",
    "    '''\n",
    "    \n",
    "    # START YOUR CODE HERE ---------\n",
    "    from pyspark.sql.types import StructField, StructType, IntegerType, TimestampType, FloatType, StringType\n",
    "\n",
    "    df = df.withColumn(\"passenger_count\", df[\"passenger_count\"].cast(IntegerType()))\n",
    "    df = df.withColumn(\"total_amount\", df[\"total_amount\"].cast(FloatType()))\n",
    "    df = df.withColumn(\"tip_amount\", df[\"tip_amount\"].cast(FloatType()))\n",
    "    df = df.withColumn(\"trip_distance\", df[\"trip_distance\"].cast(FloatType()))\n",
    "    df = df.withColumn(\"fare_amount\", df[\"fare_amount\"].cast(FloatType()))\n",
    "    df = df.withColumn(\"tpep_pickup_datetime\", df[\"tpep_pickup_datetime\"].cast(TimestampType()))\n",
    "    df = df.withColumn(\"tpep_dropoff_datetime\", df[\"tpep_dropoff_datetime\"].cast(TimestampType()))\n",
    "\n",
    "    # END YOUR CODE HERE -----------\n",
    "    \n",
    "    return df"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d4f565d0",
   "metadata": {},
   "source": [
    "### Q1.b"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "72b4f712",
   "metadata": {},
   "source": [
    "Find the rate per person based on how many passengers travel between pickup and dropoff locations."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "4e115152",
   "metadata": {},
   "outputs": [],
   "source": [
    "def common_pair(df):\n",
    "    '''\n",
    "    input: df a dataframe\n",
    "    output: df a dataframe with following columns:\n",
    "        - PULocationID\n",
    "        - DOLocationID\n",
    "        - passenger_count\n",
    "        - per_person_rate\n",
    "    \n",
    "    per_person_rate is the total_amount per person for a given pair.\n",
    "    \n",
    "    '''\n",
    "    \n",
    "    # START YOUR CODE HERE ---------\n",
    "    from pyspark.sql import Window\n",
    "\n",
    "    partition_cols = ['PULocationID','DOLocationID']\n",
    "\n",
    "    group_by_result = df.groupBy(partition_cols).count()\n",
    "    # group_by_result.show()\n",
    "\n",
    "    # Filter out any trips that have the same pick-up and drop-off location.\n",
    "    df_temp = df.filter((df.PULocationID != df.DOLocationID))\n",
    "    # group_by_result_difference_location.show()\n",
    "\n",
    "    # # [4 pts] You will be modifying the function common_pair.\n",
    "    # # Return the top 10 pickup-dropoff location pairs that have the highest number of total passengers who have traveled between them.\n",
    "    # # Sort the location pairs by total passengers.\n",
    "    df_temp = df_temp.withColumn(\"passenger_count\", sum(\"passenger_count\").over(Window.partitionBy(*partition_cols)))\n",
    "    \n",
    "    # # For each location pair, also compute\n",
    "    # # the average amount per passenger over all trips (name this per_person_rate), utilizing total_amount.\n",
    "    df_temp = df_temp.withColumn(\"total_amount_partition\", sum(\"total_amount\").over(Window.partitionBy(*partition_cols)))\n",
    "    df_temp = df_temp.withColumn(\"per_person_rate\", col(\"total_amount_partition\")/col(\"passenger_count\"))\n",
    "    \n",
    "    # # For pairs with the same total passengers,\n",
    "    # # sort them in descending order of per_person_rate.\n",
    "    # # Rename the column for total passengers to passenger_count.\n",
    "    df_temp = df_temp.select(['PULocationID','DOLocationID','passenger_count','per_person_rate']).distinct()\n",
    "    df_joined = group_by_result.join(df_temp, partition_cols)\n",
    "    df_joined = df_joined.orderBy(['passenger_count','per_person_rate'], ascending=False).limit(10)\n",
    "    df_output = df_joined.drop('count')\n",
    "    # END YOUR CODE HERE -----------\n",
    "    \n",
    "    return df_output"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "127574ab",
   "metadata": {},
   "source": [
    "### Q1.c"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "36a8fd27",
   "metadata": {},
   "source": [
    "Find which trip distances generate the highest tip percentage."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "376c981c",
   "metadata": {},
   "outputs": [],
   "source": [
    "def distance_with_most_tip(df):\n",
    "    '''\n",
    "    input: df a dataframe\n",
    "    output: df a dataframe with following columns:\n",
    "        - trip_distance\n",
    "        - tip_percent\n",
    "    \n",
    "    tip_percent is the percent of tip out of fare_amount\n",
    "    \n",
    "    '''\n",
    "    \n",
    "    # START YOUR CODE HERE ---------\n",
    "    \n",
    "    # END YOUR CODE HERE -----------\n",
    "    \n",
    "    return df"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f0172fe6",
   "metadata": {},
   "source": [
    "### Q1.d"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "4613c906",
   "metadata": {},
   "source": [
    "Determine the average speed at different times of day."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "abff9e24",
   "metadata": {},
   "outputs": [],
   "source": [
    "def time_with_most_traffic(df):\n",
    "    '''\n",
    "    input: df a dataframe\n",
    "    output: df a dataframe with following columns:\n",
    "        - time_of_day\n",
    "        - am_avg_speed\n",
    "        - pm_avg_speed\n",
    "    \n",
    "    am_avg_speed and pm_avg_speed are the average speeds for AM and PM trips at each time_of_day\n",
    "    \n",
    "    '''\n",
    "    \n",
    "    # START YOUR CODE HERE ---------\n",
    "\n",
    "    # END YOUR CODE HERE -----------\n",
    "    \n",
    "    return df"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "34cbd7b9",
   "metadata": {},
   "source": [
    "### The below cells are for you to investigate your solutions and will not be graded\n",
    "## Ensure they are commented out prior to submitting to Gradescope to avoid errors"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "bf9abefb",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "+------------+------------+---------------+------------------+\n",
      "|PULocationID|DOLocationID|passenger_count|   per_person_rate|\n",
      "+------------+------------+---------------+------------------+\n",
      "|         239|         238|             62|  4.26274198870505|\n",
      "|         237|         236|             60| 4.482500068346659|\n",
      "|         263|         141|             52|3.4190384974846473|\n",
      "|         161|         236|             42| 5.368571440378825|\n",
      "|         148|          79|             42| 4.711904752822149|\n",
      "|         142|         238|             39|  5.05487182812813|\n",
      "|         141|         236|             37| 4.355675723101641|\n",
      "|         239|         143|             37| 4.252162224537617|\n",
      "|         239|         142|             35| 3.817714350564139|\n",
      "|          79|         170|             34| 6.394705884596881|\n",
      "+------------+------------+---------------+------------------+\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# df = load_data()\n",
    "# df = clean_data(df)\n",
    "# common_pair(df).show()\n",
    "# distance_with_most_tip(df).show()\n",
    "# time_with_most_traffic(df).show()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
@@ -0,0 +1,30 @@
VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID,store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount,improvement_surcharge,total_amount,congestion_surcharge,trip_rate_rows_csv,trip_rate_csv,passenger_count_csv,total_amount_row,per_person_rate_csv,
2,1/1/2019 0:16,1/1/2019 0:25,3,1.72,1,N,41,247,2,9,0.5,0.5,0,0,0.3,2,,1.162790698,,,,,
2,1/1/2019 0:46,1/1/2019 0:49,3,0.3,1,N,107,107,1,4,0.5,0.5,1.06,0,0.3,2,,6.666666667,,,,,
1,1/1/2019 0:50,1/1/2019 1:11,3,4.6,1,N,107,181,2,19,0.5,0.5,0,0,0.3,2,,0.434782609,,,,,
1,1/1/2019 0:48,1/1/2019 1:00,3,1.5,1,N,113,90,2,7.5,0.5,0.5,0,0,0.3,2,,1.333333333,,,,,
2,1/1/2019 0:46,1/1/2019 0:47,3,0.06,1,N,116,42,2,2.5,0.5,0.5,0,0,0.3,2,,33.33333333,,,,,
1,1/1/2019 0:19,1/1/2019 0:57,3,12.3,1,N,138,50,1,38,0.5,0.5,4,5.76,0.3,2,,0.162601626,,,,,
1,1/1/2019 0:58,1/1/2019 1:15,3,2.7,1,N,141,234,1,13,0.5,0.5,1,0,0.3,2,,0.740740741,,,,,
1,1/1/2019 0:13,1/1/2019 0:22,3,1.3,1,N,144,261,2,7.5,0.5,0.5,0,0,0.3,2,,1.538461538,,,,,
1,1/1/2019 0:46,1/1/2019 0:53,3,1.5,1,N,151,239,1,7,0.5,0.5,1.65,0,0.3,2,,1.333333333,5.133947134,30,20,0.666666667,0.666666667
1,1/1/2019 0:59,1/1/2019 1:18,3,2.6,1,N,151,239,1,14,0.5,0.5,1,0,0.3,2,,0.769230769,,,,,0.666666667
2,21/12/2018 13:48,21/12/2018 13:52,3,0,1,N,151,239,1,4.5,0.5,0.5,0,0,0.3,2,,0,,,,,0.666666667
2,28/11/2018 15:52,28/11/2018 15:55,3,0,1,N,151,239,2,3.5,0.5,0.5,0,0,0.3,2,,0,,,,,0.666666667
2,28/11/2018 15:56,28/11/2018 15:58,3,0,2,N,151,239,2,52,0,0.5,0,0,0.3,2,,0,,,,,0.666666667
2,28/11/2018 16:25,28/11/2018 16:28,3,0,1,N,151,239,2,3.5,0.5,0.5,0,5.76,0.3,2,,0,,,,,0.666666667
2,28/11/2018 16:29,28/11/2018 16:33,3,0,2,N,151,239,2,52,0,0.5,0,0,0.3,2,,0,,,,,0.666666667
1,1/1/2019 0:21,1/1/2019 0:28,3,1.3,1,N,151,239,1,6.5,0.5,0.5,1.25,0,0.3,2,,1.538461538,,,,,0.666666667
1,1/1/2019 0:32,1/1/2019 0:45,3,3.7,1,N,151,239,1,13.5,0.5,0.5,3.7,0,0.3,2,,0.540540541,,,,,0.666666667
1,1/1/2019 0:57,1/1/2019 1:09,3,2.1,1,N,151,239,1,10,0.5,0.5,1.7,0,0.3,2,,0.952380952,,,,,0.666666667
1,1/1/2019 0:55,1/1/2019 1:11,3,1.2,1,N,161,170,1,10.5,0.5,0.5,2.95,0,0.3,2,,1.666666667,,,,,
1,1/1/2019 0:17,1/1/2019 0:22,6,0.6,1,N,161,161,1,5.5,0.5,0.5,1.35,0,0.3,6000,,10000,,,,,
2,1/1/2019 0:57,1/1/2019 1:03,3,1.42,1,N,170,141,1,6.5,0.5,0.5,1.56,0,0.3,2,,1.408450704,,,,,
2,1/1/2019 0:23,1/1/2019 0:25,3,0.38,1,N,170,170,2,3.5,0.5,0.5,0,0,0.3,2,,5.263157895,,,,,
2,1/1/2019 0:39,1/1/2019 0:48,3,0.55,1,N,170,170,1,6.5,0.5,0.5,1.95,0,0.3,2,,3.636363636,,,,,
1,1/1/2019 0:43,1/1/2019 1:07,3,6.3,1,N,224,25,1,21.5,0.5,0.5,5.7,0,0.3,2,,0.317460317,,,,,
1,1/1/2019 0:26,1/1/2019 0:39,3,1.9,1,N,231,79,1,10.5,0.5,0.5,2,0,0.3,2,,1.052631579,,,,,
1,1/1/2019 0:32,1/1/2019 0:32,1,0,1,Y,237,264,2,6.5,0.5,0.5,0,0,0.3,100,,#DIV/0!,,,,,
1,1/1/2019 0:24,1/1/2019 0:47,10,2.8,1,N,246,162,1,15,0.5,0.5,3.25,0,0.3,9,,3.214285714,,,,,
1,1/1/2019 0:21,1/1/2019 0:28,10,0.7,1,N,246,162,1,5.5,0.5,0.5,1.7,0,0.3,9,,12.85714286,,,,,
1,1/1/2019 0:45,1/1/2019 1:31,10,8.7,1,N,246,162,1,34.5,0.5,0.5,7.15,0,0.3,9,,1.034482759,,,,,
@@ -0,0 +1,12 @@
VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID,store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount,improvement_surcharge,total_amount,congestion_surcharge,trip_rate_rows_csv,trip_rate_csv,passenger_count_csv,total_amount_row,per_person_rate_csv,,,,,,,,,,,,
1,2019-01-01 00:46:40,2019-01-01 00:53:20,1,1.50,1,N,151,239,1,7,0.5,0.5,1.65,0,0.3,9.95,,,,,,,,,,,,,,,,,,
1,2019-01-01 00:59:47,2019-01-01 01:18:59,1,2.60,1,N,239,246,1,14,0.5,0.5,1,0,0.3,16.3,,,,,,,,,,,,,,,,,,
2,2018-12-21 13:48:30,2018-12-21 13:52:40,3,999,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 01:48:30,2018-12-21 13:52:40,3,888,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 13:01:00,2018-12-21 13:59:59,3,777,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 13:59:59,2018-12-21 14:59:59,3,666,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 14:01:00,2018-12-21 14:01:59,3,555,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 14:59:59,2018-12-21 15:59:58,3,444,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,2,2018-11-28 15:52:25,2018-11-28 15:55:45,5,.00,1,N,193,193,2,3.5,0.5,0.5,0,0,0.3,7.55,
2,2018-11-28 15:56:57,2018-11-28 15:58:33,5,333,2,N,193,193,2,52,0,0.5,0,0,0.3,55.55,,,,,,,,,,,,,,,,,,
2,2018-11-28 16:25:49,2018-11-28 16:28:26,5,222,1,N,193,193,2,3.5,0.5,0.5,0,5.76,0.3,13.31,,,,,,,,,,,,,,,,,,
2,2018-11-28 16:29:37,2018-11-28 16:33:43,5,.00,2,N,193,193,2,52,0,0.5,0,0,0.3,55.55,,,,,,,,,,,,,,,,,,
@@ -0,0 +1,12 @@
VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID,store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount,improvement_surcharge,total_amount,congestion_surcharge,trip_rate_rows_csv,trip_rate_csv,passenger_count_csv,total_amount_row,per_person_rate_csv,,,,,,,,,,,,
1,2019-01-01 00:46:40,2019-01-01 00:53:20,1,1.50,1,N,151,239,1,7,0.5,0.5,1.65,0,0.3,9.95,,,,,,,,,,,,,,,,,,
1,2019-01-01 00:59:47,2019-01-01 01:18:59,1,2.60,1,N,239,246,1,14,0.5,0.5,1,0,0.3,16.3,,,,,,,,,,,,,,,,,,
2,2018-12-21 13:48:30,2018-12-21 13:52:40,3,999,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 01:48:30,2018-12-21 13:52:40,3,888,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 13:01:00,2018-12-21 13:59:59,3,777,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 13:59:59,2018-12-21 14:59:59,3,666,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 14:01:00,2018-12-21 14:01:59,3,555,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,,,,,,,,,,,,,,,,,,
2,2018-12-21 14:59:59,2018-12-21 15:59:58,3,444,1,N,236,236,1,4.5,0.5,0.5,0,0,0.3,5.8,2,2018-11-28 15:52:25,2018-11-28 15:55:45,5,.00,1,N,193,193,2,3.5,0.5,0.5,0,0,0.3,7.55,
2,2018-11-28 15:56:57,2018-11-28 15:58:33,5,333,2,N,193,193,2,52,0,0.5,0,0,0.3,55.55,,,,,,,,,,,,,,,,,,
2,2018-11-28 16:25:49,2018-11-28 16:28:26,5,222,1,N,193,193,2,3.5,0.5,0.5,0,5.76,0.3,13.31,,,,,,,,,,,,,,,,,,
2,2018-11-28 16:29:37,2018-11-28 16:33:43,5,.00,2,N,193,193,2,52,0,0.5,0,0,0.3,55.55,,,,,,,,,,,,,,,,,,
File diff suppressed because it is too large