import warnings

import hashlib
import io
import json
import jsonschema
import pandas as pd
from toolz.curried import pipe as _pipe

from .schema import core, channels, mixins, Undefined, SCHEMA_URL

from .data import data_transformers
from ... import utils, expr
from .display import renderers, VEGALITE_VERSION, VEGAEMBED_VERSION, VEGA_VERSION
from .theme import themes


# ------------------------------------------------------------------------
# Data Utilities
def _dataset_name(values):
    """Generate a unique hash of the data

    Parameters
    ----------
    values : list or dict
        A list/dict representation of data values.

    Returns
    -------
    name : string
        A unique name generated from the hash of the values.
    """
    if isinstance(values, core.InlineDataset):
        values = values.to_dict()
    # sort_keys guarantees identical values always serialize to the same JSON,
    # so the derived name is stable across calls.
    values_json = json.dumps(values, sort_keys=True)
    # NOTE: md5 is used here purely to derive a stable dataset name, not for
    # any security purpose.
    hsh = hashlib.md5(values_json.encode()).hexdigest()
    return "data-" + hsh


def _consolidate_data(data, context):
    """If data is specified inline, then move it to context['datasets']

    This function will modify context in-place, and return a new version of data
    """
    values = Undefined
    kwds = {}

    if isinstance(data, core.InlineData):
        # Only unnamed inline data is consolidated; named data is left alone.
        if data.name is Undefined and data.values is not Undefined:
            values = data.values
            kwds = {"format": data.format}

    elif isinstance(data, dict):
        if "name" not in data and "values" in data:
            values = data["values"]
            # carry along all keys other than the values themselves
            kwds = {k: v for k, v in data.items() if k != "values"}

    if values is not Undefined:
        # Replace the inline values with a named reference; the values are
        # stored in the shared context under the generated name.
        name = _dataset_name(values)
        data = core.NamedData(name=name, **kwds)
        context.setdefault("datasets", {})[name] = values

    return data


def _prepare_data(data, context=None):
    """Convert input data to data for use within schema

    Parameters
    ----------
    data :
        The input dataset in the form of a DataFrame, dictionary, altair data
        object, or other type that is recognized by the data transformers.
    context : dict (optional)
        The to_dict context in which the data is being prepared. This is used
        to keep track of information that needs to be passed up and down the
        recursive serialization routine, such as global named datasets.
    """
    if data is Undefined:
        return data

    # convert dataframes or objects with __geo_interface__ to dict
    if isinstance(data, pd.DataFrame) or hasattr(data, "__geo_interface__"):
        data = _pipe(data, data_transformers.get())

    # convert string input to a URLData
    if isinstance(data, str):
        data = core.UrlData(data)

    # consolidate inline data to top-level datasets
    if context is not None and data_transformers.consolidate_datasets:
        data = _consolidate_data(data, context)

    # if data is still not a recognized type, then return
    if not isinstance(data, (dict, core.Data)):
        warnings.warn("data of type {} not recognized".format(type(data)))

    return data


# ------------------------------------------------------------------------
# Aliases & specializations
Bin = core.BinParams


@utils.use_signature(core.LookupData)
class LookupData(core.LookupData):
    def to_dict(self, *args, **kwargs):
        """Convert the chart to a dictionary suitable for JSON export"""
        # shallow copy so the original object's data attribute is untouched
        copy = self.copy(deep=False)
        copy.data = _prepare_data(copy.data, kwargs.get("context"))
        return super(LookupData, copy).to_dict(*args, **kwargs)


@utils.use_signature(core.FacetMapping)
class FacetMapping(core.FacetMapping):
    _class_is_valid_at_instantiation = False

    def to_dict(self, *args, **kwargs):
        """Convert the mapping to a dictionary, expanding shorthand strings."""
        copy = self.copy(deep=False)
        context = kwargs.get("context", {})
        data = context.get("data", None)
        # Shorthand strings like "field:Q" are expanded into full field defs,
        # using the context data (if any) for type inference.
        if isinstance(self.row, str):
            copy.row = core.FacetFieldDef(**utils.parse_shorthand(self.row, data))
        if isinstance(self.column, str):
            copy.column = core.FacetFieldDef(**utils.parse_shorthand(self.column, data))
        return super(FacetMapping, copy).to_dict(*args, **kwargs)


# ------------------------------------------------------------------------
# Encoding will contain channel objects that aren't valid at instantiation
core.FacetedEncoding._class_is_valid_at_instantiation = False

# ------------------------------------------------------------------------
# These are parameters that are valid at the top level, but are not valid
# for specs that are within a composite chart
# (layer, hconcat, vconcat, facet, repeat)
TOPLEVEL_ONLY_KEYS = {"background", "config", "autosize", "padding", "$schema"}


def _get_channels_mapping():
    """Map each channel class to its lowercase encoding name (Value suffix dropped)."""
    mapping = {}
    for attr in dir(channels):
        cls = getattr(channels, attr)
        if isinstance(cls, type) and issubclass(cls, core.SchemaBase):
            mapping[cls] = attr.replace("Value", "").lower()
    return mapping


# -------------------------------------------------------------------------
# Tools for working with selections
class Selection(object):
    """A Selection object"""

    # class-level counter used to auto-generate unique selection names
    _counter = 0

    @classmethod
    def _get_name(cls):
        cls._counter += 1
        return "selector{:03d}".format(cls._counter)

    def __init__(self, name, selection):
        if name is None:
            name = self._get_name()
        self.name = name
        self.selection = selection

    def __repr__(self):
        return "Selection({0!r}, {1})".format(self.name, self.selection)

    def ref(self):
        # reference usable within a chart spec
        return {"selection": self.name}

    def to_dict(self):
        return {"selection": self.name}

    def __invert__(self):
        # compose a negated selection; the composition is stored as the name
        return Selection(core.SelectionNot(**{"not": self.name}), self.selection)

    def __and__(self, other):
        if isinstance(other, Selection):
            other = other.name
        return Selection(
            core.SelectionAnd(**{"and": [self.name, other]}), self.selection
        )

    def __or__(self, other):
        if isinstance(other, Selection):
            other = other.name
        return Selection(core.SelectionOr(**{"or": [self.name, other]}), self.selection)

    def __getattr__(self, field_name):
        # allow e.g. selection.fieldname in expressions
        return expr.core.GetAttrExpression(self.name, field_name)

    def __getitem__(self, field_name):
        return expr.core.GetItemExpression(self.name, field_name)


# ------------------------------------------------------------------------
# Top-Level Functions


def value(value, **kwargs):
    """Specify a value for use in an encoding"""
    return dict(value=value, **kwargs)


def selection(name=None, type=Undefined, **kwds):
    """Create a named selection.

    Parameters
    ----------
    name : string (optional)
        The name of the selection. If not specified, a unique name will be
        created.
    type : string
        The type of the selection: one of ["interval", "single", or "multi"]
    **kwds :
        additional keywords will be used to construct a SelectionDef instance
        that controls the selection.

    Returns
    -------
    selection: Selection
        The selection object that can be used in chart creation.
226 """ 227 return Selection(name, core.SelectionDef(type=type, **kwds)) 228 229 230@utils.use_signature(core.IntervalSelection) 231def selection_interval(**kwargs): 232 """Create a selection with type='interval'""" 233 return selection(type="interval", **kwargs) 234 235 236@utils.use_signature(core.MultiSelection) 237def selection_multi(**kwargs): 238 """Create a selection with type='multi'""" 239 return selection(type="multi", **kwargs) 240 241 242@utils.use_signature(core.SingleSelection) 243def selection_single(**kwargs): 244 """Create a selection with type='single'""" 245 return selection(type="single", **kwargs) 246 247 248@utils.use_signature(core.Binding) 249def binding(input, **kwargs): 250 """A generic binding""" 251 return core.Binding(input=input, **kwargs) 252 253 254@utils.use_signature(core.BindCheckbox) 255def binding_checkbox(**kwargs): 256 """A checkbox binding""" 257 return core.BindCheckbox(input="checkbox", **kwargs) 258 259 260@utils.use_signature(core.BindRadioSelect) 261def binding_radio(**kwargs): 262 """A radio button binding""" 263 return core.BindRadioSelect(input="radio", **kwargs) 264 265 266@utils.use_signature(core.BindRadioSelect) 267def binding_select(**kwargs): 268 """A select binding""" 269 return core.BindRadioSelect(input="select", **kwargs) 270 271 272@utils.use_signature(core.BindRange) 273def binding_range(**kwargs): 274 """A range binding""" 275 return core.BindRange(input="range", **kwargs) 276 277 278def condition(predicate, if_true, if_false, **kwargs): 279 """A conditional attribute or encoding 280 281 Parameters 282 ---------- 283 predicate: Selection, LogicalOperandPredicate, expr.Expression, dict, or string 284 the selection predicate or test predicate for the condition. 285 if a string is passed, it will be treated as a test operand. 
286 if_true: 287 the spec or object to use if the selection predicate is true 288 if_false: 289 the spec or object to use if the selection predicate is false 290 **kwargs: 291 additional keyword args are added to the resulting dict 292 293 Returns 294 ------- 295 spec: dict or VegaLiteSchema 296 the spec that describes the condition 297 """ 298 test_predicates = (str, expr.Expression, core.LogicalOperandPredicate) 299 300 if isinstance(predicate, Selection): 301 condition = {"selection": predicate.name} 302 elif isinstance(predicate, core.SelectionOperand): 303 condition = {"selection": predicate} 304 elif isinstance(predicate, test_predicates): 305 condition = {"test": predicate} 306 elif isinstance(predicate, dict): 307 condition = predicate 308 else: 309 raise NotImplementedError( 310 "condition predicate of type {}" "".format(type(predicate)) 311 ) 312 313 if isinstance(if_true, core.SchemaBase): 314 # convert to dict for now; the from_dict call below will wrap this 315 # dict in the appropriate schema 316 if_true = if_true.to_dict() 317 elif isinstance(if_true, str): 318 if_true = {"shorthand": if_true} 319 if_true.update(kwargs) 320 condition.update(if_true) 321 322 if isinstance(if_false, core.SchemaBase): 323 # For the selection, the channel definitions all allow selections 324 # already. So use this SchemaBase wrapper if possible. 
325 selection = if_false.copy() 326 selection.condition = condition 327 elif isinstance(if_false, str): 328 selection = {"condition": condition, "shorthand": if_false} 329 selection.update(kwargs) 330 else: 331 selection = dict(condition=condition, **if_false) 332 333 return selection 334 335 336# -------------------------------------------------------------------- 337# Top-level objects 338 339 340class TopLevelMixin(mixins.ConfigMethodMixin): 341 """Mixin for top-level chart objects such as Chart, LayeredChart, etc.""" 342 343 _class_is_valid_at_instantiation = False 344 345 def to_dict(self, *args, **kwargs): 346 """Convert the chart to a dictionary suitable for JSON export""" 347 # We make use of three context markers: 348 # - 'data' points to the data that should be referenced for column type 349 # inference. 350 # - 'top_level' is a boolean flag that is assumed to be true; if it's 351 # true then a "$schema" arg is added to the dict. 352 # - 'datasets' is a dict of named datasets that should be inserted 353 # in the top-level object 354 355 # note: not a deep copy because we want datasets and data arguments to 356 # be passed by reference 357 context = kwargs.get("context", {}).copy() 358 context.setdefault("datasets", {}) 359 is_top_level = context.get("top_level", True) 360 361 copy = self.copy(deep=False) 362 original_data = getattr(copy, "data", Undefined) 363 copy.data = _prepare_data(original_data, context) 364 365 if original_data is not Undefined: 366 context["data"] = original_data 367 368 # remaining to_dict calls are not at top level 369 context["top_level"] = False 370 kwargs["context"] = context 371 372 try: 373 dct = super(TopLevelMixin, copy).to_dict(*args, **kwargs) 374 except jsonschema.ValidationError: 375 dct = None 376 377 # If we hit an error, then re-convert with validate='deep' to get 378 # a more useful traceback. We don't do this by default because it's 379 # much slower in the case that there are no errors. 
380 if dct is None: 381 kwargs["validate"] = "deep" 382 dct = super(TopLevelMixin, copy).to_dict(*args, **kwargs) 383 384 # TODO: following entries are added after validation. Should they be validated? 385 if is_top_level: 386 # since this is top-level we add $schema if it's missing 387 if "$schema" not in dct: 388 dct["$schema"] = SCHEMA_URL 389 390 # apply theme from theme registry 391 the_theme = themes.get() 392 dct = utils.update_nested(the_theme(), dct, copy=True) 393 394 # update datasets 395 if context["datasets"]: 396 dct.setdefault("datasets", {}).update(context["datasets"]) 397 398 return dct 399 400 def to_html( 401 self, 402 base_url="https://cdn.jsdelivr.net/npm/", 403 output_div="vis", 404 embed_options=None, 405 json_kwds=None, 406 fullhtml=True, 407 requirejs=False, 408 ): 409 return utils.spec_to_html( 410 self.to_dict(), 411 mode="vega-lite", 412 vegalite_version=VEGALITE_VERSION, 413 vegaembed_version=VEGAEMBED_VERSION, 414 vega_version=VEGA_VERSION, 415 base_url=base_url, 416 output_div=output_div, 417 embed_options=embed_options, 418 json_kwds=json_kwds, 419 fullhtml=fullhtml, 420 requirejs=requirejs, 421 ) 422 423 @utils.deprecation.deprecated( 424 "Chart.savechart is deprecated in favor of Chart.save" 425 ) 426 def savechart(self, fp, format=None, **kwargs): 427 """Save a chart to file in a variety of formats 428 429 Supported formats are json, html, png, svg 430 431 Parameters 432 ---------- 433 fp : string filename or file-like object 434 file in which to write the chart. 435 format : string (optional) 436 the format to write: one of ['json', 'html', 'png', 'svg']. 437 If not specified, the format will be determined from the filename. 438 **kwargs : 439 Additional keyword arguments are passed to the output method 440 associated with the specified format. 
441 442 """ 443 return self.save(fp, format=None, **kwargs) 444 445 def save( 446 self, 447 fp, 448 format=None, 449 override_data_transformer=True, 450 scale_factor=1.0, 451 vegalite_version=VEGALITE_VERSION, 452 vega_version=VEGA_VERSION, 453 vegaembed_version=VEGAEMBED_VERSION, 454 **kwargs, 455 ): 456 """Save a chart to file in a variety of formats 457 458 Supported formats are json, html, png, svg 459 460 Parameters 461 ---------- 462 fp : string filename or file-like object 463 file in which to write the chart. 464 format : string (optional) 465 the format to write: one of ['json', 'html', 'png', 'svg']. 466 If not specified, the format will be determined from the filename. 467 override_data_transformer : boolean (optional) 468 If True (default), then the save action will be done with 469 the MaxRowsError disabled. If False, then do not change the data 470 transformer. 471 scale_factor : float 472 For svg or png formats, scale the image by this factor when saving. 473 This can be used to control the size or resolution of the output. 474 Default is 1.0 475 **kwargs : 476 Additional keyword arguments are passed to the output method 477 associated with the specified format. 478 479 """ 480 from ...utils.save import save 481 482 kwds = dict( 483 chart=self, 484 fp=fp, 485 format=format, 486 scale_factor=scale_factor, 487 vegalite_version=vegalite_version, 488 vega_version=vega_version, 489 vegaembed_version=vegaembed_version, 490 **kwargs, 491 ) 492 493 # By default we override the data transformer. This makes it so 494 # that save() will succeed even for large datasets that would 495 # normally trigger a MaxRowsError 496 if override_data_transformer: 497 with data_transformers.disable_max_rows(): 498 result = save(**kwds) 499 else: 500 result = save(**kwds) 501 return result 502 503 # Fallback for when rendering fails; the full repr is too long to be 504 # useful in nearly all cases. 
    def __repr__(self):
        # Deliberately terse: the full schema repr is too long to be useful.
        return "alt.{}(...)".format(self.__class__.__name__)

    # Layering and stacking
    def __add__(self, other):
        if not isinstance(other, TopLevelMixin):
            raise ValueError("Only Chart objects can be layered.")
        return layer(self, other)

    def __and__(self, other):
        # chart1 & chart2 -> vertical concatenation
        if not isinstance(other, TopLevelMixin):
            raise ValueError("Only Chart objects can be concatenated.")
        return vconcat(self, other)

    def __or__(self, other):
        # chart1 | chart2 -> horizontal concatenation
        if not isinstance(other, TopLevelMixin):
            raise ValueError("Only Chart objects can be concatenated.")
        return hconcat(self, other)

    def repeat(
        self,
        repeat=Undefined,
        row=Undefined,
        column=Undefined,
        columns=Undefined,
        **kwargs,
    ):
        """Return a RepeatChart built from the chart

        Fields within the chart can be set to correspond to the row or
        column using `alt.repeat('row')` and `alt.repeat('column')`.

        Parameters
        ----------
        repeat : list
            a list of data column names to be repeated. This cannot be
            used along with the ``row`` or ``column`` argument.
        row : list
            a list of data column names to be mapped to the row facet
        column : list
            a list of data column names to be mapped to the column facet
        columns : int
            the maximum number of columns before wrapping. Only referenced
            if ``repeat`` is specified.
        **kwargs :
            additional keywords passed to RepeatChart.

        Returns
        -------
        chart : RepeatChart
            a repeated chart.
        """
        repeat_specified = repeat is not Undefined
        rowcol_specified = row is not Undefined or column is not Undefined

        # repeat and row/column are mutually exclusive ways of specifying
        # the repeat mapping.
        if repeat_specified and rowcol_specified:
            raise ValueError(
                "repeat argument cannot be combined with row/column argument."
            )

        if repeat_specified:
            # NOTE(review): this branch is a no-op; kept for symmetry with the
            # else branch below.
            repeat = repeat
        else:
            repeat = core.RepeatMapping(row=row, column=column)

        return RepeatChart(spec=self, repeat=repeat, columns=columns, **kwargs)

    def properties(self, **kwargs):
        """Set top-level properties of the Chart.

        Argument names and types are the same as class initialization.
        """
        copy = self.copy(deep=False)
        for key, val in kwargs.items():
            if key == "selection" and isinstance(val, Selection):
                # For backward compatibility with old selection interface.
                setattr(copy, key, {val.name: val.selection})
            else:
                # Don't validate data, because it hasn't been processed.
                if key != "data":
                    self.validate_property(key, val)
                setattr(copy, key, val)
        return copy

    def project(
        self,
        type="mercator",
        center=Undefined,
        clipAngle=Undefined,
        clipExtent=Undefined,
        coefficient=Undefined,
        distance=Undefined,
        fraction=Undefined,
        lobes=Undefined,
        parallel=Undefined,
        precision=Undefined,
        radius=Undefined,
        ratio=Undefined,
        reflectX=Undefined,
        reflectY=Undefined,
        rotate=Undefined,
        scale=Undefined,
        spacing=Undefined,
        tilt=Undefined,
        translate=Undefined,
        **kwds,
    ):
        """Add a geographic projection to the chart.

        This is generally used either with ``mark_geoshape`` or with the
        ``latitude``/``longitude`` encodings.

        Available projection types are
        ['albers', 'albersUsa', 'azimuthalEqualArea', 'azimuthalEquidistant',
        'conicConformal', 'conicEqualArea', 'conicEquidistant', 'equalEarth', 'equirectangular',
        'gnomonic', 'identity', 'mercator', 'orthographic', 'stereographic', 'transverseMercator']

        Attributes
        ----------
        type : ProjectionType
            The cartographic projection to use. This value is case-insensitive, for example
            `"albers"` and `"Albers"` indicate the same projection type. You can find all valid
            projection types [in the
            documentation](https://vega.github.io/vega-lite/docs/projection.html#projection-types).

            **Default value:** `mercator`
        center : List(float)
            Sets the projection’s center to the specified center, a two-element array of
            longitude and latitude in degrees.

            **Default value:** `[0, 0]`
        clipAngle : float
            Sets the projection’s clipping circle radius to the specified angle in degrees. If
            `null`, switches to [antimeridian](http://bl.ocks.org/mbostock/3788999) cutting
            rather than small-circle clipping.
        clipExtent : List(List(float))
            Sets the projection’s viewport clip extent to the specified bounds in pixels. The
            extent bounds are specified as an array `[[x0, y0], [x1, y1]]`, where `x0` is the
            left-side of the viewport, `y0` is the top, `x1` is the right and `y1` is the
            bottom. If `null`, no viewport clipping is performed.
        coefficient : float

        distance : float

        fraction : float

        lobes : float

        parallel : float

        precision : Mapping(required=[length])
            Sets the threshold for the projection’s [adaptive
            resampling](http://bl.ocks.org/mbostock/3795544) to the specified value in pixels.
            This value corresponds to the [Douglas–Peucker
            distance](http://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm).
            If precision is not specified, returns the projection’s current resampling
            precision which defaults to `√0.5 ≅ 0.70710…`.
        radius : float

        ratio : float

        reflectX : boolean

        reflectY : boolean

        rotate : List(float)
            Sets the projection’s three-axis rotation to the specified angles, which must be a
            two- or three-element array of numbers [`lambda`, `phi`, `gamma`] specifying the
            rotation angles in degrees about each spherical axis. (These correspond to yaw,
            pitch and roll.)

            **Default value:** `[0, 0, 0]`
        scale : float
            Sets the projection's scale (zoom) value, overriding automatic fitting.

        spacing : float

        tilt : float

        translate : List(float)
            Sets the projection's translation (pan) value, overriding automatic fitting.

        """
        projection = core.Projection(
            center=center,
            clipAngle=clipAngle,
            clipExtent=clipExtent,
            coefficient=coefficient,
            distance=distance,
            fraction=fraction,
            lobes=lobes,
            parallel=parallel,
            precision=precision,
            radius=radius,
            ratio=ratio,
            reflectX=reflectX,
            reflectY=reflectY,
            rotate=rotate,
            scale=scale,
            spacing=spacing,
            tilt=tilt,
            translate=translate,
            type=type,
            **kwds,
        )
        return self.properties(projection=projection)

    def _add_transform(self, *transforms):
        """Copy the chart and add specified transforms to chart.transform"""
        # deep-copy only the transform list so appending does not mutate self
        copy = self.copy(deep=["transform"])
        if copy.transform is Undefined:
            copy.transform = []
        copy.transform.extend(transforms)
        return copy

    def transform_aggregate(self, aggregate=Undefined, groupby=Undefined, **kwds):
        """
        Add an AggregateTransform to the schema.

        Parameters
        ----------
        aggregate : List(:class:`AggregatedFieldDef`)
            Array of objects that define fields to aggregate.
        groupby : List(string)
            The data fields to group by. If not specified, a single group containing all data
            objects will be used.
        **kwds :
            additional keywords are converted to aggregates using standard
            shorthand parsing.

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        Examples
        --------
        The aggregate transform allows you to specify transforms directly using
        the same shorthand syntax as used in encodings:

        >>> import altair as alt
        >>> chart1 = alt.Chart().transform_aggregate(
        ...     mean_acc='mean(Acceleration)',
        ...     groupby=['Origin']
        ... )
        >>> print(chart1.transform[0].to_json())  # doctest: +NORMALIZE_WHITESPACE
        {
          "aggregate": [
            {
              "as": "mean_acc",
              "field": "Acceleration",
              "op": "mean"
            }
          ],
          "groupby": [
            "Origin"
          ]
        }

        It also supports including AggregatedFieldDef instances or dicts directly,
        so you can create the above transform like this:

        >>> chart2 = alt.Chart().transform_aggregate(
        ...     [alt.AggregatedFieldDef(field='Acceleration', op='mean',
        ...                             **{'as': 'mean_acc'})],
        ...     groupby=['Origin']
        ... )
        >>> chart2.transform == chart1.transform
        True

        See Also
        --------
        alt.AggregateTransform : underlying transform object

        """
        if aggregate is Undefined:
            aggregate = []
        # Each keyword is parsed as shorthand: key becomes the output name,
        # the value supplies the field and the aggregate operation.
        for key, val in kwds.items():
            parsed = utils.parse_shorthand(val)
            dct = {
                "as": key,
                "field": parsed.get("field", Undefined),
                "op": parsed.get("aggregate", Undefined),
            }
            aggregate.append(core.AggregatedFieldDef(**dct))
        return self._add_transform(
            core.AggregateTransform(aggregate=aggregate, groupby=groupby)
        )

    def transform_bin(self, as_=Undefined, field=Undefined, bin=True, **kwargs):
        """
        Add a BinTransform to the schema.

        Parameters
        ----------
        as_ : anyOf(string, List(string))
            The output fields at which to write the start and end bin values.
        bin : anyOf(boolean, :class:`BinParams`)
            An object indicating bin properties, or simply ``true`` for using default bin
            parameters.
        field : string
            The data field to bin.

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        Examples
        --------
        >>> import altair as alt
        >>> chart = alt.Chart().transform_bin("x_binned", "x")
        >>> chart.transform[0]
        BinTransform({
          as: 'x_binned',
          bin: True,
          field: 'x'
        })

        >>> chart = alt.Chart().transform_bin("x_binned", "x",
        ...                                   bin=alt.Bin(maxbins=10))
        >>> chart.transform[0]
        BinTransform({
          as: 'x_binned',
          bin: BinParams({
            maxbins: 10
          }),
          field: 'x'
        })

        See Also
        --------
        alt.BinTransform : underlying transform object

        """
        # 'as' is a Python keyword, so the argument is spelled as_ and is
        # mapped to the schema's "as" property here.
        if as_ is not Undefined:
            if "as" in kwargs:
                raise ValueError(
                    "transform_bin: both 'as_' and 'as' passed as arguments."
                )
            kwargs["as"] = as_
        kwargs["bin"] = bin
        kwargs["field"] = field
        return self._add_transform(core.BinTransform(**kwargs))

    def transform_calculate(self, as_=Undefined, calculate=Undefined, **kwargs):
        """
        Add a CalculateTransform to the schema.

        Parameters
        ----------
        as_ : string
            The field for storing the computed formula value.
        calculate : string or alt.expr expression
            A `expression <https://vega.github.io/vega-lite/docs/types.html#expression>`__
            string. Use the variable ``datum`` to refer to the current data object.
        **kwargs
            transforms can also be passed by keyword argument; see Examples

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        Examples
        --------
        >>> import altair as alt
        >>> from altair import datum, expr

        >>> chart = alt.Chart().transform_calculate(y = 2 * expr.sin(datum.x))
        >>> chart.transform[0]
        CalculateTransform({
          as: 'y',
          calculate: (2 * sin(datum.x))
        })

        It's also possible to pass the ``CalculateTransform`` arguments directly:

        >>> kwds = {'as': 'y', 'calculate': '2 * sin(datum.x)'}
        >>> chart = alt.Chart().transform_calculate(**kwds)
        >>> chart.transform[0]
        CalculateTransform({
          as: 'y',
          calculate: '2 * sin(datum.x)'
        })

        As the first form is easier to write and understand, that is the
        recommended method.

        See Also
        --------
        alt.CalculateTransform : underlying transform object
        """
        if as_ is Undefined:
            # accept 'as' passed as a raw keyword
            as_ = kwargs.pop("as", Undefined)
        else:
            if "as" in kwargs:
                raise ValueError(
                    "transform_calculate: both 'as_' and 'as' passed as arguments."
                )
        if as_ is not Undefined or calculate is not Undefined:
            dct = {"as": as_, "calculate": calculate}
            self = self._add_transform(core.CalculateTransform(**dct))
        # any remaining keywords each become their own calculate transform
        for as_, calculate in kwargs.items():
            dct = {"as": as_, "calculate": calculate}
            self = self._add_transform(core.CalculateTransform(**dct))
        return self

    def transform_impute(
        self,
        impute,
        key,
        frame=Undefined,
        groupby=Undefined,
        keyvals=Undefined,
        method=Undefined,
        value=Undefined,
    ):
        """
        Add an ImputeTransform to the schema.

        Parameters
        ----------
        impute : string
            The data field for which the missing values should be imputed.
        key : string
            A key field that uniquely identifies data objects within a group.
            Missing key values (those occurring in the data but not in the current group) will
            be imputed.
        frame : List(anyOf(None, float))
            A frame specification as a two-element array used to control the window over which
            the specified method is applied. The array entries should either be a number
            indicating the offset from the current data object, or null to indicate unbounded
            rows preceding or following the current data object. For example, the value ``[-5,
            5]`` indicates that the window should include five objects preceding and five
            objects following the current object.
            **Default value:** : ``[null, null]`` indicating that the window includes all
            objects.
        groupby : List(string)
            An optional array of fields by which to group the values.
            Imputation will then be performed on a per-group basis.
946 keyvals : anyOf(List(Mapping(required=[])), :class:`ImputeSequence`) 947 Defines the key values that should be considered for imputation. 948 An array of key values or an object defining a `number sequence 949 <https://vega.github.io/vega-lite/docs/impute.html#sequence-def>`__. 950 If provided, this will be used in addition to the key values observed within the 951 input data. If not provided, the values will be derived from all unique values of 952 the ``key`` field. For ``impute`` in ``encoding``, the key field is the x-field if 953 the y-field is imputed, or vice versa. 954 If there is no impute grouping, this property *must* be specified. 955 method : :class:`ImputeMethod` 956 The imputation method to use for the field value of imputed data objects. 957 One of ``value``, ``mean``, ``median``, ``max`` or ``min``. 958 **Default value:** ``"value"`` 959 value : Mapping(required=[]) 960 The field value to use when the imputation ``method`` is ``"value"``. 961 962 Returns 963 ------- 964 self : Chart object 965 returns chart to allow for chaining 966 967 See Also 968 -------- 969 alt.ImputeTransform : underlying transform object 970 """ 971 return self._add_transform( 972 core.ImputeTransform( 973 impute=impute, 974 key=key, 975 frame=frame, 976 groupby=groupby, 977 keyvals=keyvals, 978 method=method, 979 value=value, 980 ) 981 ) 982 983 def transform_joinaggregate( 984 self, joinaggregate=Undefined, groupby=Undefined, **kwargs 985 ): 986 """ 987 Add a JoinAggregateTransform to the schema. 988 989 Parameters 990 ---------- 991 joinaggregate : List(:class:`JoinAggregateFieldDef`) 992 The definition of the fields in the join aggregate, and what calculations to use. 993 groupby : List(string) 994 The data fields for partitioning the data objects into separate groups. If 995 unspecified, all data points will be in a single group. 996 **kwargs 997 joinaggregates can also be passed by keyword argument; see Examples. 
998 999 Returns 1000 ------- 1001 self : Chart object 1002 returns chart to allow for chaining 1003 1004 Examples 1005 -------- 1006 >>> import altair as alt 1007 >>> chart = alt.Chart().transform_joinaggregate(x='sum(y)') 1008 >>> chart.transform[0] 1009 JoinAggregateTransform({ 1010 joinaggregate: [JoinAggregateFieldDef({ 1011 as: 'x', 1012 field: 'y', 1013 op: 'sum' 1014 })] 1015 }) 1016 1017 See Also 1018 -------- 1019 alt.JoinAggregateTransform : underlying transform object 1020 """ 1021 if joinaggregate is Undefined: 1022 joinaggregate = [] 1023 for key, val in kwargs.items(): 1024 parsed = utils.parse_shorthand(val) 1025 dct = { 1026 "as": key, 1027 "field": parsed.get("field", Undefined), 1028 "op": parsed.get("aggregate", Undefined), 1029 } 1030 joinaggregate.append(core.JoinAggregateFieldDef(**dct)) 1031 return self._add_transform( 1032 core.JoinAggregateTransform(joinaggregate=joinaggregate, groupby=groupby) 1033 ) 1034 1035 def transform_filter(self, filter, **kwargs): 1036 """ 1037 Add a FilterTransform to the schema. 1038 1039 Parameters 1040 ---------- 1041 filter : a filter expression or :class:`LogicalOperandPredicate` 1042 The `filter` property must be one of the predicate definitions: 1043 (1) a string or alt.expr expression 1044 (2) a range predicate 1045 (3) a selection predicate 1046 (4) a logical operand combining (1)-(3) 1047 (5) a Selection object 1048 1049 Returns 1050 ------- 1051 self : Chart object 1052 returns chart to allow for chaining 1053 1054 See Also 1055 -------- 1056 alt.FilterTransform : underlying transform object 1057 1058 """ 1059 if isinstance(filter, Selection): 1060 filter = {"selection": filter.name} 1061 elif isinstance(filter, core.SelectionOperand): 1062 filter = {"selection": filter} 1063 return self._add_transform(core.FilterTransform(filter=filter, **kwargs)) 1064 1065 def transform_flatten(self, flatten, as_=Undefined): 1066 """Add a FlattenTransform to the schema. 

        Parameters
        ----------
        flatten : List(string)
            An array of one or more data fields containing arrays to flatten.
            If multiple fields are specified, their array values should have a parallel
            structure, ideally with the same length.
            If the lengths of parallel arrays do not match,
            the longest array will be used with ``null`` values added for missing entries.
        as : List(string)
            The output field names for extracted array values.
            **Default value:** The field name of the corresponding array field

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        See Also
        --------
        alt.FlattenTransform : underlying transform object
        """
        # "as" is a Python keyword, so it is accepted as ``as_`` and forwarded
        # through a keyword dict.
        return self._add_transform(
            core.FlattenTransform(flatten=flatten, **{"as": as_})
        )

    def transform_fold(self, fold, as_=Undefined):
        """Add a FoldTransform to the schema.

        Parameters
        ----------
        fold : List(string)
            An array of data fields indicating the properties to fold.
        as : [string, string]
            The output field names for the key and value properties produced by the fold
            transform. Default: ``["key", "value"]``

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        See Also
        --------
        alt.FoldTransform : underlying transform object
        """
        return self._add_transform(core.FoldTransform(fold=fold, **{"as": as_}))

    def transform_lookup(
        self,
        as_=Undefined,
        from_=Undefined,
        lookup=Undefined,
        default=Undefined,
        **kwargs,
    ):
        """Add a LookupTransform to the schema

        Attributes
        ----------
        as_ : anyOf(string, List(string))
            The field or fields for storing the computed formula value.
            If ``from.fields`` is specified, the transform will use the same names for ``as``.
            If ``from.fields`` is not specified, ``as`` has to be a string and we put the whole
            object into the data under the specified name.
        from_ : :class:`LookupData`
            Secondary data reference.
        lookup : string
            Key in primary data source.
        default : string
            The default value to use if lookup fails. **Default value:** ``null``

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        See Also
        --------
        alt.LookupTransform : underlying transform object
        """
        # "as" and "from" are Python keywords; the trailing-underscore aliases
        # are accepted, but passing both spellings at once is ambiguous and
        # rejected explicitly.
        if as_ is not Undefined:
            if "as" in kwargs:
                raise ValueError(
                    "transform_lookup: both 'as_' and 'as' passed as arguments."
                )
            kwargs["as"] = as_
        if from_ is not Undefined:
            if "from" in kwargs:
                raise ValueError(
                    "transform_lookup: both 'from_' and 'from' passed as arguments."
                )
            kwargs["from"] = from_
        kwargs["lookup"] = lookup
        kwargs["default"] = default
        return self._add_transform(core.LookupTransform(**kwargs))

    def transform_sample(self, sample=1000):
        """
        Add a SampleTransform to the schema.

        Parameters
        ----------
        sample : float
            The maximum number of data objects to include in the sample. Default: 1000.

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        See Also
        --------
        alt.SampleTransform : underlying transform object
        """
        return self._add_transform(core.SampleTransform(sample))

    def transform_stack(self, as_, stack, groupby, offset=Undefined, sort=Undefined):
        """
        Add a StackTransform to the schema.

        Parameters
        ----------
        as_ : anyOf(string, List(string))
            Output field names. This can be either a string or an array of strings with
            two elements denoting the name for the fields for stack start and stack end
            respectively.
            If a single string(eg."val") is provided, the end field will be "val_end".
        stack : string
            The field which is stacked.
        groupby : List(string)
            The data fields to group by.
        offset : enum('zero', 'center', 'normalize')
            Mode for stacking marks. Default: 'zero'.
        sort : List(:class:`SortField`)
            Field that determines the order of leaves in the stacked charts.

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        See Also
        --------
        alt.StackTransform : underlying transform object
        """
        return self._add_transform(
            core.StackTransform(
                stack=stack, groupby=groupby, offset=offset, sort=sort, **{"as": as_}
            )
        )

    def transform_timeunit(
        self, as_=Undefined, field=Undefined, timeUnit=Undefined, **kwargs
    ):
        """
        Add a TimeUnitTransform to the schema.

        Parameters
        ----------
        as_ : string
            The output field to write the timeUnit value.
        field : string
            The data field to apply time unit.
        timeUnit : :class:`TimeUnit`
            The timeUnit.
        **kwargs
            transforms can also be passed by keyword argument; see Examples

        Returns
        -------
        self : Chart object
            returns chart to allow for chaining

        Examples
        --------
        >>> import altair as alt
        >>> from altair import datum, expr

        >>> chart = alt.Chart().transform_timeunit(month='month(date)')
        >>> chart.transform[0]
        TimeUnitTransform({
          as: 'month',
          field: 'date',
          timeUnit: 'month'
        })

        It's also possible to pass the ``TimeUnitTransform`` arguments directly;
        this is most useful in cases where the desired field name is not a
        valid python identifier:

        >>> kwds = {'as': 'month', 'timeUnit': 'month', 'field': 'The Month'}
        >>> chart = alt.Chart().transform_timeunit(**kwds)
        >>> chart.transform[0]
        TimeUnitTransform({
          as: 'month',
          field: 'The Month',
          timeUnit: 'month'
        })

        As the first form is easier to write and understand, that is the
        recommended method.

        See Also
        --------
        alt.TimeUnitTransform : underlying transform object

        """
        # "as" is a Python keyword: accept either 'as_' or an explicit 'as'
        # kwarg, but not both at once.
        if as_ is Undefined:
            as_ = kwargs.pop("as", Undefined)
        else:
            if "as" in kwargs:
                raise ValueError(
                    "transform_timeunit: both 'as_' and 'as' passed as arguments."
                )
        # Explicit (as_, field, timeUnit) arguments become one transform...
        if as_ is not Undefined:
            dct = {"as": as_, "timeUnit": timeUnit, "field": field}
            self = self._add_transform(core.TimeUnitTransform(**dct))
        # ...and each remaining keyword is shorthand (e.g. month='month(date)')
        # that becomes an additional transform.  ``self`` is rebound on each
        # _add_transform call because _add_transform returns a copy.
        for as_, shorthand in kwargs.items():
            dct = utils.parse_shorthand(
                shorthand,
                parse_timeunits=True,
                parse_aggregates=False,
                parse_types=False,
            )
            dct.pop("type", None)
            dct["as"] = as_
            if "timeUnit" not in dct:
                raise ValueError("'{}' must include a valid timeUnit".format(shorthand))
            self = self._add_transform(core.TimeUnitTransform(**dct))
        return self

    def transform_window(
        self,
        window=Undefined,
        frame=Undefined,
        groupby=Undefined,
        ignorePeers=Undefined,
        sort=Undefined,
        **kwargs,
    ):
        """Add a WindowTransform to the schema

        Parameters
        ----------
        window : List(:class:`WindowFieldDef`)
            The definition of the fields in the window, and what calculations to use.
        frame : List(anyOf(None, float))
            A frame specification as a two-element array indicating how the sliding window
            should proceed. The array entries should either be a number indicating the offset
            from the current data object, or null to indicate unbounded rows preceding or
            following the current data object. The default value is ``[null, 0]``, indicating
            that the sliding window includes the current object and all preceding objects. The
            value ``[-5, 5]`` indicates that the window should include five objects preceding
            and five objects following the current object. Finally, ``[null, null]`` indicates
            that the window frame should always include all data objects. The only operators
            affected are the aggregation operations and the ``first_value``, ``last_value``, and
            ``nth_value`` window operations. The other window operations are not affected by
            this.

            **Default value:** : ``[null, 0]`` (includes the current object and all preceding
            objects)
        groupby : List(string)
            The data fields for partitioning the data objects into separate windows. If
            unspecified, all data points will be in a single group.
        ignorePeers : boolean
            Indicates if the sliding window frame should ignore peer values. (Peer values are
            those considered identical by the sort criteria). The default is false, causing the
            window frame to expand to include all peer values. If set to true, the window frame
            will be defined by offset values only. This setting only affects those operations
            that depend on the window frame, namely aggregation operations and the first_value,
            last_value, and nth_value window operations.

            **Default value:** ``false``
        sort : List(:class:`SortField`)
            A sort field definition for sorting data objects within a window. If two data
            objects are considered equal by the comparator, they are considered "peer" values of
            equal rank. If sort is not specified, the order is undefined: data objects are
            processed in the order they are observed and none are considered peers (the
            ignorePeers parameter is ignored and treated as if set to ``true`` ).
        **kwargs
            transforms can also be passed by keyword argument; see Examples

        Examples
        --------
        A cumulative line chart

        >>> import altair as alt
        >>> import numpy as np
        >>> import pandas as pd
        >>> data = pd.DataFrame({'x': np.arange(100),
        ...                      'y': np.random.randn(100)})
        >>> chart = alt.Chart(data).mark_line().encode(
        ...     x='x:Q',
        ...     y='ycuml:Q'
        ... ).transform_window(
        ...     ycuml='sum(y)'
        ... )
        >>> chart.transform[0]
        WindowTransform({
          window: [WindowFieldDef({
            as: 'ycuml',
            field: 'y',
            op: 'sum'
          })]
        })

        """
        if kwargs:
            if window is Undefined:
                window = []
            # Each keyword is shorthand (e.g. ycuml='sum(y)'): the keyword is
            # the output field name, the parsed shorthand supplies field/op.
            for as_, shorthand in kwargs.items():
                kwds = {"as": as_}
                kwds.update(
                    utils.parse_shorthand(
                        shorthand,
                        parse_aggregates=False,
                        parse_window_ops=True,
                        parse_timeunits=False,
                        parse_types=False,
                    )
                )
                window.append(core.WindowFieldDef(**kwds))

        return self._add_transform(
            core.WindowTransform(
                window=window,
                frame=frame,
                groupby=groupby,
                ignorePeers=ignorePeers,
                sort=sort,
            )
        )

    # Display-related methods

    def _repr_mimebundle_(self, include=None, exclude=None):
        """Return a MIME bundle for display in Jupyter frontends."""
        # Catch errors explicitly to get around issues in Jupyter frontend
        # see https://github.com/ipython/ipython/issues/11038
        try:
            dct = self.to_dict()
        except Exception:
            utils.display_traceback(in_ipython=True)
            return {}
        else:
            return renderers.get()(dct)

    def display(self, renderer=Undefined, theme=Undefined, actions=Undefined, **kwargs):
        """Display chart in Jupyter notebook or JupyterLab

        Parameters are passed as options to vega-embed within supported frontends.
        See https://github.com/vega/vega-embed#options for details.

        Parameters
        ----------
        renderer : string ('canvas' or 'svg')
            The renderer to use
        theme : string
            The Vega theme name to use; see https://github.com/vega/vega-themes
        actions : bool or dict
            Specify whether action links ("Open In Vega Editor", etc.) are
            included in the view.
        **kwargs :
            Additional parameters are also passed to vega-embed as options.

        """
        from IPython.display import display

        if renderer is not Undefined:
            kwargs["renderer"] = renderer
        if theme is not Undefined:
            kwargs["theme"] = theme
        if actions is not Undefined:
            kwargs["actions"] = actions

        if kwargs:
            # Temporarily layer the given embed options on top of the active
            # renderer options, then restore them after display.
            options = renderers.options.copy()
            options["embed_options"] = options.get("embed_options", {}).copy()
            options["embed_options"].update(kwargs)
            with renderers.enable(**options):
                display(self)
        else:
            display(self)

    def serve(
        self,
        ip="127.0.0.1",
        port=8888,
        n_retries=50,
        files=None,
        jupyter_warning=True,
        open_browser=True,
        http_server=None,
        **kwargs,
    ):
        """Open a browser window and display a rendering of the chart

        Parameters
        ----------
        html : string
            HTML to serve
        ip : string (default = '127.0.0.1')
            ip address at which the HTML will be served.
        port : int (default = 8888)
            the port at which to serve the HTML
        n_retries : int (default = 50)
            the number of nearby ports to search if the specified port
            is already in use.
        files : dictionary (optional)
            dictionary of extra content to serve
        jupyter_warning : bool (optional)
            if True (default), then print a warning if this is used
            within the Jupyter notebook
        open_browser : bool (optional)
            if True (default), then open a web browser to the given HTML
        http_server : class (optional)
            optionally specify an HTTPServer class to use for showing the
            figure. The default is Python's basic HTTPServer.
        **kwargs :
            additional keyword arguments passed to the save() method

        """
        from ...utils.server import serve

        # Render the chart to an in-memory HTML document, then hand it to the
        # bundled development server.
        html = io.StringIO()
        self.save(html, format="html", **kwargs)
        html.seek(0)

        serve(
            html.read(),
            ip=ip,
            port=port,
            n_retries=n_retries,
            files=files,
            jupyter_warning=jupyter_warning,
            open_browser=open_browser,
            http_server=http_server,
        )

    @utils.use_signature(core.Resolve)
    def _set_resolve(self, **kwargs):
        """Copy the chart and update the resolve property with kwargs"""
        if not hasattr(self, "resolve"):
            raise ValueError(
                "{} object has no attribute " "'resolve'".format(self.__class__)
            )
        copy = self.copy(deep=["resolve"])
        if copy.resolve is Undefined:
            copy.resolve = core.Resolve()
        for key, val in kwargs.items():
            copy.resolve[key] = val
        return copy

    @utils.use_signature(core.AxisResolveMap)
    def resolve_axis(self, *args, **kwargs):
        return self._set_resolve(axis=core.AxisResolveMap(*args, **kwargs))

    @utils.use_signature(core.LegendResolveMap)
    def resolve_legend(self, *args, **kwargs):
        return self._set_resolve(legend=core.LegendResolveMap(*args, **kwargs))

    @utils.use_signature(core.ScaleResolveMap)
    def resolve_scale(self, *args, **kwargs):
        return self._set_resolve(scale=core.ScaleResolveMap(*args, **kwargs))


class _EncodingMixin(object):
    @utils.use_signature(core.FacetedEncoding)
    def encode(self, *args, **kwargs):
        # Convert args to kwargs based on their types.
        kwargs = utils.infer_encoding_types(args, kwargs, channels)

        # get a copy of the dict representation of the previous encoding
        copy = self.copy(deep=["encoding"])
        encoding = copy._get("encoding", {})
        if isinstance(encoding, core.VegaLiteSchema):
            encoding = {k: v for k, v in encoding._kwds.items() if v is not Undefined}

        # update with the new encodings, and apply them to the copy
        encoding.update(kwargs)
        copy.encoding = core.FacetedEncoding(**encoding)
        return copy

    def facet(
        self,
        facet=Undefined,
        row=Undefined,
        column=Undefined,
        data=Undefined,
        columns=Undefined,
        **kwargs,
    ):
        """Create a facet chart from the current chart.

        Faceted charts require data to be specified at the top level; if data
        is not specified, the data from the current chart will be used at the
        top level.

        Parameters
        ----------
        facet : string or alt.Facet (optional)
            The data column to use as an encoding for a wrapped facet.
            If specified, then neither row nor column may be specified.
        column : string or alt.Column (optional)
            The data column to use as an encoding for a column facet.
            May be combined with row argument, but not with facet argument.
        row : string or alt.Column (optional)
            The data column to use as an encoding for a row facet.
            May be combined with column argument, but not with facet argument.
        data : string or dataframe (optional)
            The dataset to use for faceting. If not supplied, then data must
            be specified in the top-level chart that calls this method.
        columns : integer
            the maximum number of columns for a wrapped facet.

        Returns
        -------
        self :
            for chaining
        """
        # facet= is mutually exclusive with row=/column=
        facet_specified = facet is not Undefined
        rowcol_specified = row is not Undefined or column is not Undefined

        if facet_specified and rowcol_specified:
            raise ValueError(
                "facet argument cannot be combined with row/column argument."
            )

        if data is Undefined:
            if self.data is Undefined:
                raise ValueError(
                    "Facet charts require data to be specified at the top level."
                )
            # Move this chart's data up to the top-level FacetChart.
            self = self.copy(deep=False)
            data, self.data = self.data, Undefined

        if facet_specified:
            if isinstance(facet, str):
                facet = channels.Facet(facet)
        else:
            facet = FacetMapping(row=row, column=column)

        return FacetChart(spec=self, facet=facet, data=data, columns=columns, **kwargs)


class Chart(
    TopLevelMixin, _EncodingMixin, mixins.MarkMethodMixin, core.TopLevelUnitSpec
):
    """Create a basic Altair/Vega-Lite chart.

    Although it is possible to set all Chart properties as constructor attributes,
    it is more idiomatic to use methods such as ``mark_point()``, ``encode()``,
    ``transform_filter()``, ``properties()``, etc. See Altair's documentation
    for details and examples: http://altair-viz.github.io/.

    Attributes
    ----------
    data : Data
        An object describing the data source
    mark : AnyMark
        A string describing the mark type (one of `"bar"`, `"circle"`, `"square"`, `"tick"`,
        `"line"`, * `"area"`, `"point"`, `"rule"`, `"geoshape"`, and `"text"`) or a
        MarkDef object.
    encoding : FacetedEncoding
        A key-value mapping between encoding channels and definition of fields.
    autosize : anyOf(AutosizeType, AutoSizeParams)
        Sets how the visualization size should be determined. If a string, should be one of
        `"pad"`, `"fit"` or `"none"`. Object values can additionally specify parameters for
        content sizing and automatic resizing. `"fit"` is only supported for single and
        layered views that don't use `rangeStep`. __Default value__: `pad`
    background : string
        CSS color property to use as the background of visualization.

        **Default value:** none (transparent)
    config : Config
        Vega-Lite configuration object. This property can only be defined at the top-level
        of a specification.
    description : string
        Description of this mark for commenting purpose.
    height : float
        The height of a visualization.
    name : string
        Name of the visualization for later reference.
    padding : Padding
        The default visualization padding, in pixels, from the edge of the visualization
        canvas to the data rectangle. If a number, specifies padding for all sides. If an
        object, the value should have the format `{"left": 5, "top": 5, "right": 5,
        "bottom": 5}` to specify padding for each side of the visualization. __Default
        value__: `5`
    projection : Projection
        An object defining properties of geographic projection. Works with `"geoshape"`
        marks and `"point"` or `"line"` marks that have a channel (one or more of `"X"`,
        `"X2"`, `"Y"`, `"Y2"`) with type `"latitude"`, or `"longitude"`.
    selection : Mapping(required=[])
        A key-value mapping between selection names and definitions.
    title : anyOf(string, TitleParams)
        Title for the plot.
    transform : List(Transform)
        An array of data transformations such as filter and new field calculation.
    width : float
        The width of a visualization.
    """

    def __init__(
        self,
        data=Undefined,
        encoding=Undefined,
        mark=Undefined,
        width=Undefined,
        height=Undefined,
        **kwargs,
    ):
        super(Chart, self).__init__(
            data=data,
            encoding=encoding,
            mark=mark,
            width=width,
            height=height,
            **kwargs,
        )

    @classmethod
    def from_dict(cls, dct, validate=True):
        """Construct class from a dictionary representation

        Parameters
        ----------
        dct : dictionary
            The dict from which to construct the class
        validate : boolean
            If True (default), then validate the input against the schema.

        Returns
        -------
        obj : Chart object
            The wrapped schema

        Raises
        ------
        jsonschema.ValidationError :
            if validate=True and dct does not conform to the schema
        """
        # Try each TopLevelMixin subclass in turn (substituting the parent
        # implementation for Chart itself to avoid infinite recursion) and
        # return the first one that validates.
        for class_ in TopLevelMixin.__subclasses__():
            if class_ is Chart:
                class_ = super(Chart, cls)
            try:
                return class_.from_dict(dct, validate=validate)
            except jsonschema.ValidationError:
                pass

        # As a last resort, try using the Root vegalite object
        return core.Root.from_dict(dct, validate)

    def add_selection(self, *selections):
        """Add one or more selections to the chart."""
        if not selections:
            return self
        copy = self.copy(deep=["selection"])
        if copy.selection is Undefined:
            copy.selection = {}

        for s in selections:
            copy.selection[s.name] = s.selection
        return copy

    def interactive(self, name=None, bind_x=True, bind_y=True):
        """Make chart axes scales interactive

        Parameters
        ----------
        name : string
            The selection name to use for the axes scales. This name should be
            unique among all selections within the chart.
1741 bind_x : boolean, default True 1742 If true, then bind the interactive scales to the x-axis 1743 bind_y : boolean, default True 1744 If true, then bind the interactive scales to the y-axis 1745 1746 Returns 1747 ------- 1748 chart : 1749 copy of self, with interactive axes added 1750 1751 """ 1752 encodings = [] 1753 if bind_x: 1754 encodings.append("x") 1755 if bind_y: 1756 encodings.append("y") 1757 return self.add_selection( 1758 selection_interval(bind="scales", encodings=encodings) 1759 ) 1760 1761 1762def _check_if_valid_subspec(spec, classname): 1763 """Check if the spec is a valid sub-spec. 1764 1765 If it is not, then raise a ValueError 1766 """ 1767 err = ( 1768 'Objects with "{0}" attribute cannot be used within {1}. ' 1769 "Consider defining the {0} attribute in the {1} object instead." 1770 ) 1771 1772 if not isinstance(spec, (core.SchemaBase, dict)): 1773 raise ValueError("Only chart objects can be used in {0}.".format(classname)) 1774 for attr in TOPLEVEL_ONLY_KEYS: 1775 if isinstance(spec, core.SchemaBase): 1776 val = getattr(spec, attr, Undefined) 1777 else: 1778 val = spec.get(attr, Undefined) 1779 if val is not Undefined: 1780 raise ValueError(err.format(attr, classname)) 1781 1782 1783def _check_if_can_be_layered(spec): 1784 """Check if the spec can be layered.""" 1785 1786 def _get(spec, attr): 1787 if isinstance(spec, core.SchemaBase): 1788 return spec._get(attr) 1789 else: 1790 return spec.get(attr, Undefined) 1791 1792 encoding = _get(spec, "encoding") 1793 if encoding is not Undefined: 1794 for channel in ["row", "column", "facet"]: 1795 if _get(encoding, channel) is not Undefined: 1796 raise ValueError("Faceted charts cannot be layered.") 1797 if isinstance(spec, (Chart, LayerChart)): 1798 return 1799 1800 if not isinstance(spec, (core.SchemaBase, dict)): 1801 raise ValueError("Only chart objects can be layered.") 1802 if _get(spec, "facet") is not Undefined: 1803 raise ValueError("Faceted charts cannot be layered.") 1804 if 
isinstance(spec, FacetChart) or _get(spec, "facet") is not Undefined: 1805 raise ValueError("Faceted charts cannot be layered.") 1806 if isinstance(spec, RepeatChart) or _get(spec, "repeat") is not Undefined: 1807 raise ValueError("Repeat charts cannot be layered.") 1808 if isinstance(spec, ConcatChart) or _get(spec, "concat") is not Undefined: 1809 raise ValueError("Concatenated charts cannot be layered.") 1810 if isinstance(spec, HConcatChart) or _get(spec, "hconcat") is not Undefined: 1811 raise ValueError("Concatenated charts cannot be layered.") 1812 if isinstance(spec, VConcatChart) or _get(spec, "vconcat") is not Undefined: 1813 raise ValueError("Concatenated charts cannot be layered.") 1814 1815 1816@utils.use_signature(core.TopLevelRepeatSpec) 1817class RepeatChart(TopLevelMixin, core.TopLevelRepeatSpec): 1818 """A chart repeated across rows and columns with small changes""" 1819 1820 def __init__(self, data=Undefined, spec=Undefined, repeat=Undefined, **kwargs): 1821 _check_if_valid_subspec(spec, "RepeatChart") 1822 super(RepeatChart, self).__init__(data=data, spec=spec, repeat=repeat, **kwargs) 1823 1824 def interactive(self, name=None, bind_x=True, bind_y=True): 1825 """Make chart axes scales interactive 1826 1827 Parameters 1828 ---------- 1829 name : string 1830 The selection name to use for the axes scales. This name should be 1831 unique among all selections within the chart. 
        bind_x : boolean, default True
            If true, then bind the interactive scales to the x-axis
        bind_y : boolean, default True
            If true, then bind the interactive scales to the y-axis

        Returns
        -------
        chart :
            copy of self, with interactive axes added

        """
        # Delegate interactivity to the repeated sub-spec.
        copy = self.copy(deep=False)
        copy.spec = copy.spec.interactive(name=name, bind_x=bind_x, bind_y=bind_y)
        return copy

    def add_selection(self, *selections):
        """Add one or more selections to the chart."""
        if not selections or self.spec is Undefined:
            return self
        copy = self.copy()
        copy.spec = copy.spec.add_selection(*selections)
        return copy


def repeat(repeater="repeat"):
    """Tie a channel to the row or column within a repeated chart

    The output of this should be passed to the ``field`` attribute of
    a channel.

    Parameters
    ----------
    repeater : {'row'|'column'|'repeat'}
        The repeater to tie the field to. Default is 'repeat'.

    Returns
    -------
    repeat : RepeatRef object
    """
    if repeater not in ["row", "column", "repeat"]:
        raise ValueError("repeater must be one of ['row', 'column', 'repeat']")
    return core.RepeatRef(repeat=repeater)


@utils.use_signature(core.TopLevelConcatSpec)
class ConcatChart(TopLevelMixin, core.TopLevelConcatSpec):
    """A chart with horizontally-concatenated facets"""

    def __init__(self, data=Undefined, concat=(), columns=Undefined, **kwargs):
        # TODO: move common data to top level?
        for spec in concat:
            _check_if_valid_subspec(spec, "ConcatChart")
        super(ConcatChart, self).__init__(
            data=data, concat=list(concat), columns=columns, **kwargs
        )
        # Hoist data shared by all subcharts to the top level.
        self.data, self.concat = _combine_subchart_data(self.data, self.concat)

    def __ior__(self, other):
        # In-place concatenation: chart |= other
        _check_if_valid_subspec(other, "ConcatChart")
        self.concat.append(other)
        self.data, self.concat = _combine_subchart_data(self.data, self.concat)
        return self

    def __or__(self, other):
        # Non-mutating concatenation: chart | other
        copy = self.copy(deep=["concat"])
        copy |= other
        return copy

    def add_selection(self, *selections):
        """Add one or more selections to all subcharts."""
        if not selections or not self.concat:
            return self
        copy = self.copy()
        copy.concat = [chart.add_selection(*selections) for chart in copy.concat]
        return copy


def concat(*charts, **kwargs):
    """Concatenate charts horizontally"""
    return ConcatChart(concat=charts, **kwargs)


@utils.use_signature(core.TopLevelHConcatSpec)
class HConcatChart(TopLevelMixin, core.TopLevelHConcatSpec):
    """A chart with horizontally-concatenated facets"""

    def __init__(self, data=Undefined, hconcat=(), **kwargs):
        # TODO: move common data to top level?
        for spec in hconcat:
            _check_if_valid_subspec(spec, "HConcatChart")
        super(HConcatChart, self).__init__(data=data, hconcat=list(hconcat), **kwargs)
        # Hoist data shared by all subcharts to the top level.
        self.data, self.hconcat = _combine_subchart_data(self.data, self.hconcat)

    def __ior__(self, other):
        # In-place horizontal concatenation: chart |= other
        _check_if_valid_subspec(other, "HConcatChart")
        self.hconcat.append(other)
        self.data, self.hconcat = _combine_subchart_data(self.data, self.hconcat)
        return self

    def __or__(self, other):
        # Non-mutating horizontal concatenation: chart | other
        copy = self.copy(deep=["hconcat"])
        copy |= other
        return copy

    def add_selection(self, *selections):
        """Add one or more selections to all subcharts."""
        if not selections or not self.hconcat:
            return self
        copy = self.copy()
        copy.hconcat = [chart.add_selection(*selections) for chart in copy.hconcat]
        return copy


def hconcat(*charts, **kwargs):
    """Concatenate charts horizontally"""
    return HConcatChart(hconcat=charts, **kwargs)


@utils.use_signature(core.TopLevelVConcatSpec)
class VConcatChart(TopLevelMixin, core.TopLevelVConcatSpec):
    """A chart with vertically-concatenated facets"""

    def __init__(self, data=Undefined, vconcat=(), **kwargs):
        # TODO: move common data to top level?
        for spec in vconcat:
            _check_if_valid_subspec(spec, "VConcatChart")
        super(VConcatChart, self).__init__(data=data, vconcat=list(vconcat), **kwargs)
        # Hoist data shared by all subcharts to the top level.
        self.data, self.vconcat = _combine_subchart_data(self.data, self.vconcat)

    def __iand__(self, other):
        # In-place vertical concatenation: chart &= other
        _check_if_valid_subspec(other, "VConcatChart")
        self.vconcat.append(other)
        self.data, self.vconcat = _combine_subchart_data(self.data, self.vconcat)
        return self

    def __and__(self, other):
        # Non-mutating vertical concatenation: chart & other
        copy = self.copy(deep=["vconcat"])
        copy &= other
        return copy

    def add_selection(self, *selections):
        """Add one or more selections to all subcharts."""
        if not selections or not self.vconcat:
            return self
        copy = self.copy()
        copy.vconcat = [chart.add_selection(*selections) for chart in copy.vconcat]
        return copy


def vconcat(*charts, **kwargs):
    """Concatenate charts vertically"""
    return VConcatChart(vconcat=charts, **kwargs)


@utils.use_signature(core.TopLevelLayerSpec)
class LayerChart(TopLevelMixin, _EncodingMixin, core.TopLevelLayerSpec):
    """A Chart with layers within a single panel"""

    def __init__(self, data=Undefined, layer=(), **kwargs):
        # TODO: move common data to top level?
1992 # TODO: check for conflicting interaction 1993 for spec in layer: 1994 _check_if_valid_subspec(spec, "LayerChart") 1995 _check_if_can_be_layered(spec) 1996 super(LayerChart, self).__init__(data=data, layer=list(layer), **kwargs) 1997 self.data, self.layer = _combine_subchart_data(self.data, self.layer) 1998 1999 def __iadd__(self, other): 2000 _check_if_valid_subspec(other, "LayerChart") 2001 _check_if_can_be_layered(other) 2002 self.layer.append(other) 2003 self.data, self.layer = _combine_subchart_data(self.data, self.layer) 2004 return self 2005 2006 def __add__(self, other): 2007 copy = self.copy(deep=["layer"]) 2008 copy += other 2009 return copy 2010 2011 def add_layers(self, *layers): 2012 copy = self.copy(deep=["layer"]) 2013 for layer in layers: 2014 copy += layer 2015 return copy 2016 2017 def interactive(self, name=None, bind_x=True, bind_y=True): 2018 """Make chart axes scales interactive 2019 2020 Parameters 2021 ---------- 2022 name : string 2023 The selection name to use for the axes scales. This name should be 2024 unique among all selections within the chart. 
        bind_x : boolean, default True
            If true, then bind the interactive scales to the x-axis
        bind_y : boolean, default True
            If true, then bind the interactive scales to the y-axis

        Returns
        -------
        chart :
            copy of self, with interactive axes added

        """
        if not self.layer:
            raise ValueError(
                "LayerChart: cannot call interactive() until a " "layer is defined"
            )
        copy = self.copy(deep=["layer"])
        # NOTE(review): only the first layer is made interactive -- presumably
        # sufficient because layers share their scales; confirm against the
        # Vega-Lite scale-resolution rules.
        copy.layer[0] = copy.layer[0].interactive(
            name=name, bind_x=bind_x, bind_y=bind_y
        )
        return copy

    def add_selection(self, *selections):
        """Add one or more selections to all subcharts."""
        if not selections or not self.layer:
            return self
        copy = self.copy()
        # The selections are attached to the first layer only.
        copy.layer[0] = copy.layer[0].add_selection(*selections)
        return copy


def layer(*charts, **kwargs):
    """Layer multiple charts."""
    return LayerChart(layer=charts, **kwargs)


@utils.use_signature(core.TopLevelFacetSpec)
class FacetChart(TopLevelMixin, core.TopLevelFacetSpec):
    """A Chart made up of a single subchart repeated across facets of the data"""

    def __init__(self, data=Undefined, spec=Undefined, facet=Undefined, **kwargs):
        # Validate the inner spec before accepting it.
        _check_if_valid_subspec(spec, "FacetChart")
        super(FacetChart, self).__init__(data=data, spec=spec, facet=facet, **kwargs)

    def interactive(self, name=None, bind_x=True, bind_y=True):
        """Make chart axes scales interactive

        Parameters
        ----------
        name : string
            The selection name to use for the axes scales. This name should be
            unique among all selections within the chart.
        bind_x : boolean, default True
            If true, then bind the interactive scales to the x-axis
        bind_y : boolean, default True
            If true, then bind the interactive scales to the y-axis

        Returns
        -------
        chart :
            copy of self, with interactive axes added

        """
        # A shallow copy is enough: only the `spec` attribute is rebound.
        copy = self.copy(deep=False)
        copy.spec = copy.spec.interactive(name=name, bind_x=bind_x, bind_y=bind_y)
        return copy

    def add_selection(self, *selections):
        """Add one or more selections to the chart."""
        if not selections or self.spec is Undefined:
            return self
        copy = self.copy()
        # The selections are attached to the inner (faceted) spec.
        copy.spec = copy.spec.add_selection(*selections)
        return copy


def topo_feature(url, feature, **kwargs):
    """A convenience function for extracting features from a topojson url

    Parameters
    ----------
    url : string
        An URL from which to load the data set.

    feature : string
        The name of the TopoJSON object set to convert to a GeoJSON feature collection. For
        example, in a map of the world, there may be an object set named `"countries"`.
        Using the feature property, we can extract this set and generate a GeoJSON feature
        object for each country.

    **kwargs :
        additional keywords passed to TopoDataFormat

    Returns
    -------
    data : core.UrlData
        A URL data object carrying a topojson format specification.
    """
    return core.UrlData(
        url=url, format=core.TopoDataFormat(type="topojson", feature=feature, **kwargs)
    )


def _combine_subchart_data(data, subcharts):
    """Move data shared by all subcharts to the top level of the spec.

    Returns the possibly-updated ``(data, subcharts)`` pair; when the data is
    hoisted, subcharts are replaced by copies with their data cleared.
    """

    def remove_data(subchart):
        # Return a copy of `subchart` with its data cleared; subcharts that
        # already have no data are returned unchanged.
        if subchart.data is not Undefined:
            subchart = subchart.copy()
            subchart.data = Undefined
        return subchart

    if not subcharts:
        # No subcharts = nothing to do.
        pass
    elif data is Undefined:
        # Top level has no data; all subchart data must
        # be identical to proceed.
        subdata = subcharts[0].data
        # Identity comparison (`is`), not equality: data is hoisted only when
        # every subchart references the very same data object.
        if subdata is not Undefined and all(c.data is subdata for c in subcharts):
            data = subdata
            subcharts = [remove_data(c) for c in subcharts]
    else:
        # Top level has data; subchart data must be either
        # undefined or identical to proceed.
        if all(c.data is Undefined or c.data is data for c in subcharts):
            subcharts = [remove_data(c) for c in subcharts]

    return data, subcharts


@utils.use_signature(core.SequenceParams)
def sequence(start, stop=None, step=Undefined, as_=Undefined, **kwds):
    """Sequence generator."""
    # Mirror the builtin range(): a single positional argument is the stop
    # value, with start defaulting to 0.
    if stop is None:
        start, stop = 0, start
    # "as" is a Python keyword, so the as_ argument is forwarded via a dict
    # expansion.
    params = core.SequenceParams(start=start, stop=stop, step=step, **{"as": as_})
    return core.SequenceGenerator(sequence=params, **kwds)


@utils.use_signature(core.GraticuleParams)
def graticule(**kwds):
    """Graticule generator."""
    if not kwds:
        # graticule: True indicates default parameters
        graticule = True
    else:
        graticule = core.GraticuleParams(**kwds)
    return core.GraticuleGenerator(graticule=graticule)


def sphere():
    """Sphere generator."""
    return core.SphereGenerator(sphere=True)