@@ -387,6 +387,7 @@ def read_gbq_table(  # type: ignore[overload-overlap]
        enable_snapshot: bool = ...,
        dry_run: Literal[False] = ...,
        force_total_order: Optional[bool] = ...,
+       n_rows: Optional[int] = None,
    ) -> dataframe.DataFrame:
        ...
@@ -408,6 +409,7 @@ def read_gbq_table(
        enable_snapshot: bool = ...,
        dry_run: Literal[True] = ...,
        force_total_order: Optional[bool] = ...,
+       n_rows: Optional[int] = None,
    ) -> pandas.Series:
        ...
@@ -428,6 +430,7 @@ def read_gbq_table(
        enable_snapshot: bool = True,
        dry_run: bool = False,
        force_total_order: Optional[bool] = None,
+       n_rows: Optional[int] = None,
    ) -> dataframe.DataFrame | pandas.Series:
        import bigframes._tools.strings
        import bigframes.dataframe as dataframe
@@ -618,6 +621,7 @@ def read_gbq_table(
            at_time=time_travel_timestamp if enable_snapshot else None,
            primary_key=primary_key,
            session=self._session,
+           n_rows=n_rows,
        )
        # if we don't have a unique index, we order by row hash if we are in strict mode
        if (
@@ -852,6 +856,7 @@ def read_gbq_query(
            columns=columns,
            use_cache=configuration["query"]["useQueryCache"],
            force_total_order=force_total_order,
+           n_rows=query_job.result().total_rows,
            # max_results and filters are omitted because they are already
            # handled by to_query(), above.
        )
0 commit comments