Skip to content

Commit

Permalink
Merge inbound to mozilla-central. a=merge
Browse files Browse the repository at this point in the history
  • Loading branch information
shindli committed Jun 21, 2019
2 parents 0be95c4 + 673d61d commit a78e233
Show file tree
Hide file tree
Showing 48 changed files with 741 additions and 236 deletions.
45 changes: 40 additions & 5 deletions gfx/layers/apz/src/AsyncPanZoomController.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1822,8 +1822,8 @@ nsEventStatus AsyncPanZoomController::OnScaleEnd(
}

nsEventStatus AsyncPanZoomController::HandleEndOfPan() {
MOZ_ASSERT(GetCurrentTouchBlock());
GetCurrentTouchBlock()->GetOverscrollHandoffChain()->FlushRepaints();
MOZ_ASSERT(GetCurrentTouchBlock() || GetCurrentPanGestureBlock());
GetCurrentInputBlock()->GetOverscrollHandoffChain()->FlushRepaints();
ParentLayerPoint flingVelocity = GetVelocityVector();

// Clear our velocities; if DispatchFling() gives the fling to us,
Expand All @@ -1845,7 +1845,7 @@ nsEventStatus AsyncPanZoomController::HandleEndOfPan() {
if (flingVelocity.Length() < StaticPrefs::APZFlingMinVelocityThreshold()) {
// Relieve overscroll now if needed, since we will not transition to a fling
// animation and then an overscroll animation, and relieve it then.
GetCurrentTouchBlock()
GetCurrentInputBlock()
->GetOverscrollHandoffChain()
->SnapBackOverscrolledApzc(this);
return nsEventStatus_eConsumeNoDefault;
Expand All @@ -1856,8 +1856,8 @@ nsEventStatus AsyncPanZoomController::HandleEndOfPan() {
// which nulls out mTreeManager, could be called concurrently.
if (APZCTreeManager* treeManagerLocal = GetApzcTreeManager()) {
const FlingHandoffState handoffState{
flingVelocity, GetCurrentTouchBlock()->GetOverscrollHandoffChain(),
false /* not handoff */, GetCurrentTouchBlock()->GetScrolledApzc()};
flingVelocity, GetCurrentInputBlock()->GetOverscrollHandoffChain(),
false /* not handoff */, GetCurrentInputBlock()->GetScrolledApzc()};
treeManagerLocal->DispatchFling(this, handoffState);
}
return nsEventStatus_eConsumeNoDefault;
Expand Down Expand Up @@ -2556,6 +2556,35 @@ nsEventStatus AsyncPanZoomController::OnPan(const PanGestureInput& aEvent,
ScreenPoint physicalPanDisplacement = aEvent.mPanDisplacement;
ParentLayerPoint logicalPanDisplacement =
aEvent.UserMultipliedLocalPanDisplacement();
if (aEvent.mDeltaType == PanGestureInput::PANDELTA_PAGE) {
// Pan events with page units are used by Gtk, so this replicates Gtk:
// https://gitlab.gnome.org/GNOME/gtk/blob/c734c7e9188b56f56c3a504abee05fa40c5475ac/gtk/gtkrange.c#L3065-3073
CSSSize pageScrollSize;
CSSToParentLayerScale2D zoom;
{
// Grab the lock to access the frame metrics.
RecursiveMutexAutoLock lock(mRecursiveMutex);
pageScrollSize = mScrollMetadata.GetPageScrollAmount() /
Metrics().GetDevPixelsPerCSSPixel();
zoom = Metrics().GetZoom();
}
// scrollUnit* is in units of "ParentLayer pixels per page proportion"...
auto scrollUnitWidth = std::min(std::pow(pageScrollSize.width, 2.0 / 3.0),
pageScrollSize.width / 2.0) *
zoom.xScale;
auto scrollUnitHeight = std::min(std::pow(pageScrollSize.height, 2.0 / 3.0),
pageScrollSize.height / 2.0) *
zoom.yScale;
// ... and pan displacements are in units of "page proportion count"
// here, so the products of them and scrollUnit* are in ParentLayer pixels
ParentLayerPoint physicalPanDisplacementPL(
physicalPanDisplacement.x * scrollUnitWidth,
physicalPanDisplacement.y * scrollUnitHeight);
physicalPanDisplacement = ToScreenCoordinates(physicalPanDisplacementPL,
aEvent.mLocalPanStartPoint);
logicalPanDisplacement.x *= scrollUnitWidth;
logicalPanDisplacement.y *= scrollUnitHeight;
}

MOZ_ASSERT(GetCurrentPanGestureBlock());
AdjustDeltaForAllowedScrollDirections(
Expand Down Expand Up @@ -2609,6 +2638,12 @@ nsEventStatus AsyncPanZoomController::OnPanEnd(const PanGestureInput& aEvent) {
mX.EndTouch(aEvent.mTime);
mY.EndTouch(aEvent.mTime);

// Use HandleEndOfPan for fling on platforms that don't
// emit momentum events (Gtk).
if (aEvent.mSimulateMomentum) {
return HandleEndOfPan();
}

// Drop any velocity on axes where we don't have room to scroll anyways
// (in this APZC, or an APZC further in the handoff chain).
// This ensures that we don't enlarge the display port unnecessarily.
Expand Down
19 changes: 0 additions & 19 deletions gfx/layers/ipc/LayersMessageUtils.h
Original file line number Diff line number Diff line change
Expand Up @@ -167,25 +167,6 @@ struct ParamTraits<mozilla::layers::CompositableHandle> {
}
};

// Helper class for reading bitfields.
// If T has bitfields members, derive ParamTraits<T> from BitfieldHelper<T>.
template <typename ParamType>
struct BitfieldHelper {
  // The address of a bitfield member cannot be taken, so it cannot be handed
  // to ReadParam directly. Instead, deserialize into a temporary bool and
  // store it through the supplied member-function setter.
  static bool ReadBoolForBitfield(const Message* aMsg, PickleIterator* aIter,
                                  ParamType* aResult,
                                  void (ParamType::*aSetter)(bool)) {
    bool value = false;
    if (!ReadParam(aMsg, aIter, &value)) {
      return false;
    }
    (aResult->*aSetter)(value);
    return true;
  }
};

template <>
struct ParamTraits<mozilla::layers::FrameMetrics>
: BitfieldHelper<mozilla::layers::FrameMetrics> {
Expand Down
19 changes: 19 additions & 0 deletions ipc/glue/IPCMessageUtils.h
Original file line number Diff line number Diff line change
Expand Up @@ -1088,6 +1088,25 @@ struct ParamTraits<nsILoadInfo::CrossOriginPolicy>
: EnumSerializer<nsILoadInfo::CrossOriginPolicy,
CrossOriginPolicyValidator> {};

// Helper class for reading bitfields.
// If T has bitfields members, derive ParamTraits<T> from BitfieldHelper<T>.
template <typename ParamType>
struct BitfieldHelper {
  // We need this helper because we can't get the address of a bitfield to
  // pass directly to ReadParam. So instead we read it into a temporary bool
  // and set the bitfield using a setter function
  //
  // aMsg/aIter: the IPC message and read cursor to deserialize from.
  // aResult: the object whose bitfield member is being populated.
  // aSetter: pointer-to-member setter invoked with the deserialized value.
  // Returns true on a successful read; false leaves *aResult untouched.
  static bool ReadBoolForBitfield(const Message* aMsg, PickleIterator* aIter,
                                  ParamType* aResult,
                                  void (ParamType::*aSetter)(bool)) {
    bool value;
    if (ReadParam(aMsg, aIter, &value)) {
      (aResult->*aSetter)(value);
      return true;
    }
    return false;
  }
};

} /* namespace IPC */

#endif /* __IPC_GLUE_IPCMESSAGEUTILS_H__ */
9 changes: 8 additions & 1 deletion js/src/gc/GCEnum.h
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,14 @@ enum class ZealMode {
_(RegExpStatics) \
_(RegExpSharedBytecode) \
_(TypedArrayElements) \
_(TypeDescrTraceList)
_(TypeDescrTraceList) \
_(NativeIterator) \
_(JitScript) \
_(ObjectGroupAddendum) \
_(ScriptDebugScript) \
_(BreakpointSite) \
_(ForOfPIC) \
_(ForOfPICStub)

#define JS_FOR_EACH_MEMORY_USE(_) \
JS_FOR_EACH_PUBLIC_MEMORY_USE(_) \
Expand Down
3 changes: 2 additions & 1 deletion js/src/gc/Zone.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -701,7 +701,8 @@ void MemoryTracker::checkEmptyOnDestroy() {
inline bool MemoryTracker::allowMultipleAssociations(MemoryUse use) const {
  // For most uses only one association is possible for each GC thing. Allow a
  // one-to-many relationship only where necessary.
  switch (use) {
    case MemoryUse::RegExpSharedBytecode:
    case MemoryUse::BreakpointSite:
    case MemoryUse::ForOfPICStub:
      return true;
    default:
      return false;
  }
}

void MemoryTracker::trackMemory(Cell* cell, size_t nbytes, MemoryUse use) {
Expand Down
2 changes: 1 addition & 1 deletion js/src/jit/BaselineCacheIRCompiler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1831,7 +1831,7 @@ bool BaselineCacheIRCompiler::emitGuardAndGetIterator() {
EmitPreBarrier(masm, iterObjAddr, MIRType::Object);

// Mark iterator as active.
Address iterFlagsAddr(niScratch, NativeIterator::offsetOfFlags());
Address iterFlagsAddr(niScratch, NativeIterator::offsetOfFlagsAndCount());
masm.storePtr(obj, iterObjAddr);
masm.or32(Imm32(NativeIterator::Flags::Active), iterFlagsAddr);

Expand Down
2 changes: 1 addition & 1 deletion js/src/jit/Ion.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3145,7 +3145,7 @@ void jit::DestroyJitScripts(FreeOp* fop, JSScript* script) {
}

if (script->hasJitScript()) {
JitScript::Destroy(script->zone(), script->jitScript());
script->releaseJitScript();
}
}

Expand Down
2 changes: 1 addition & 1 deletion js/src/jit/IonCacheIRCompiler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2210,7 +2210,7 @@ bool IonCacheIRCompiler::emitGuardAndGetIterator() {
EmitPreBarrier(masm, iterObjAddr, MIRType::Object);

// Mark iterator as active.
Address iterFlagsAddr(niScratch, NativeIterator::offsetOfFlags());
Address iterFlagsAddr(niScratch, NativeIterator::offsetOfFlagsAndCount());
masm.storePtr(obj, iterObjAddr);
masm.or32(Imm32(NativeIterator::Flags::Active), iterFlagsAddr);

Expand Down
16 changes: 12 additions & 4 deletions js/src/jit/JitScript.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,10 @@ static size_t NumTypeSets(JSScript* script) {
}

JitScript::JitScript(JSScript* script, uint32_t typeSetOffset,
uint32_t bytecodeTypeMapOffset)
uint32_t bytecodeTypeMapOffset, uint32_t allocBytes)
: typeSetOffset_(typeSetOffset),
bytecodeTypeMapOffset_(bytecodeTypeMapOffset) {
bytecodeTypeMapOffset_(bytecodeTypeMapOffset),
allocBytes_(allocBytes) {
setTypesGeneration(script->zone()->types.generation);

uint8_t* base = reinterpret_cast<uint8_t*>(this);
Expand Down Expand Up @@ -98,8 +99,8 @@ bool JSScript::createJitScript(JSContext* cx) {
uint32_t typeSetOffset = sizeof(JitScript) + numICEntries() * sizeof(ICEntry);
uint32_t bytecodeTypeMapOffset =
typeSetOffset + numTypeSets * sizeof(StackTypeSet);
UniquePtr<JitScript> jitScript(
new (raw) JitScript(this, typeSetOffset, bytecodeTypeMapOffset));
UniquePtr<JitScript> jitScript(new (raw) JitScript(
this, typeSetOffset, bytecodeTypeMapOffset, allocSize.value()));

// Sanity check the length computations.
MOZ_ASSERT(jitScript->numICEntries() == numICEntries());
Expand All @@ -116,6 +117,7 @@ bool JSScript::createJitScript(JSContext* cx) {
MOZ_ASSERT(!jitScript_);
prepareForDestruction.release();
jitScript_ = jitScript.release();
AddCellMemory(this, allocSize.value(), MemoryUse::JitScript);

// We have a JitScript so we can set the script's jitCodeRaw_ pointer to the
// Baseline Interpreter code.
Expand Down Expand Up @@ -150,8 +152,14 @@ void JSScript::maybeReleaseJitScript() {
return;
}

releaseJitScript();
}

void JSScript::releaseJitScript() {
MOZ_ASSERT(!hasIonScript());

RemoveCellMemory(this, jitScript_->allocBytes(), MemoryUse::JitScript);

JitScript::Destroy(zone(), jitScript_);
jitScript_ = nullptr;
updateJitCodeRaw(runtimeFromMainThread());
Expand Down
7 changes: 6 additions & 1 deletion js/src/jit/JitScript.h
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,9 @@ class alignas(uintptr_t) JitScript final {
// bytecode map queries are in linear order.
uint32_t bytecodeTypeMapHint_ = 0;

// The size of this allocation.
uint32_t allocBytes_ = 0;

struct Flags {
// Flag set when discarding JIT code to indicate this script is on the stack
// and type information and JIT code should not be discarded.
Expand Down Expand Up @@ -147,7 +150,7 @@ class alignas(uintptr_t) JitScript final {

public:
JitScript(JSScript* script, uint32_t typeSetOffset,
uint32_t bytecodeTypeMapOffset);
uint32_t bytecodeTypeMapOffset, uint32_t allocBytes);

#ifdef DEBUG
~JitScript() {
Expand Down Expand Up @@ -327,6 +330,8 @@ class alignas(uintptr_t) JitScript final {
uint32_t idx);
void removeDependentWasmImport(wasm::Instance& instance, uint32_t idx);
void unlinkDependentWasmImports();

size_t allocBytes() const { return allocBytes_; }
};

// Ensures no JitScripts are purged in the current zone.
Expand Down
4 changes: 2 additions & 2 deletions js/src/jit/MacroAssembler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3462,7 +3462,7 @@ void MacroAssembler::debugAssertObjHasFixedSlots(Register obj,
void MacroAssembler::branchIfNativeIteratorNotReusable(Register ni,
Label* notReusable) {
// See NativeIterator::isReusable.
Address flagsAddr(ni, NativeIterator::offsetOfFlags());
Address flagsAddr(ni, NativeIterator::offsetOfFlagsAndCount());

#ifdef DEBUG
Label niIsInitialized;
Expand Down Expand Up @@ -3530,7 +3530,7 @@ void MacroAssembler::iteratorClose(Register obj, Register temp1, Register temp2,

// Clear active bit.
and32(Imm32(~NativeIterator::Flags::Active),
Address(temp1, NativeIterator::offsetOfFlags()));
Address(temp1, NativeIterator::offsetOfFlagsAndCount()));

// Reset property cursor.
loadPtr(Address(temp1, NativeIterator::offsetOfGuardsEnd()), temp2);
Expand Down
48 changes: 35 additions & 13 deletions js/src/vm/Iteration.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -592,14 +592,7 @@ static PropertyIteratorObject* NewPropertyIteratorObject(JSContext* cx) {
return res;
}

static PropertyIteratorObject* CreatePropertyIterator(
JSContext* cx, Handle<JSObject*> objBeingIterated, HandleIdVector props,
uint32_t numGuards, uint32_t guardKey) {
Rooted<PropertyIteratorObject*> propIter(cx, NewPropertyIteratorObject(cx));
if (!propIter) {
return nullptr;
}

static inline size_t ExtraStringCount(size_t propertyCount, size_t guardCount) {
static_assert(sizeof(ReceiverGuard) == 2 * sizeof(GCPtrFlatString),
"NativeIterators are allocated in space for 1) themselves, "
"2) the properties a NativeIterator iterates (as "
Expand All @@ -608,9 +601,24 @@ static PropertyIteratorObject* CreatePropertyIterator(
"this size-relationship when determining the extra space to "
"allocate");

size_t extraCount = props.length() + numGuards * 2;
void* mem =
cx->pod_malloc_with_extra<NativeIterator, GCPtrFlatString>(extraCount);
return propertyCount + guardCount * 2;
}

static inline size_t AllocationSize(size_t propertyCount, size_t guardCount) {
  // Total malloc size for a NativeIterator: the header object itself plus
  // the trailing GCPtrFlatString slots used for properties and guards.
  const size_t trailingSlots = ExtraStringCount(propertyCount, guardCount);
  return sizeof(NativeIterator) + trailingSlots * sizeof(GCPtrFlatString);
}

static PropertyIteratorObject* CreatePropertyIterator(
JSContext* cx, Handle<JSObject*> objBeingIterated, HandleIdVector props,
uint32_t numGuards, uint32_t guardKey) {
Rooted<PropertyIteratorObject*> propIter(cx, NewPropertyIteratorObject(cx));
if (!propIter) {
return nullptr;
}

void* mem = cx->pod_malloc_with_extra<NativeIterator, GCPtrFlatString>(
ExtraStringCount(props.length(), numGuards));
if (!mem) {
return nullptr;
}
Expand Down Expand Up @@ -680,14 +688,23 @@ NativeIterator::NativeIterator(JSContext* cx,
reinterpret_cast<GCPtrFlatString*>(guardsBegin() + numGuards)),
propertiesEnd_(propertyCursor_),
guardKey_(guardKey),
flags_(0) // note: no Flags::Initialized
flagsAndCount_(0) // note: no Flags::Initialized
{
MOZ_ASSERT(!*hadError);

// NOTE: This must be done first thing: PropertyIteratorObject::finalize
// can only free |this| (and not leak it) if this has happened.
propIter->setNativeIterator(this);

if (!setInitialPropertyCount(props.length())) {
ReportAllocationOverflow(cx);
*hadError = true;
return;
}

size_t nbytes = AllocationSize(props.length(), numGuards);
AddCellMemory(propIter, nbytes, MemoryUse::NativeIterator);

for (size_t i = 0, len = props.length(); i < len; i++) {
JSFlatString* str = IdToString(cx, props[i]);
if (!str) {
Expand Down Expand Up @@ -754,6 +771,11 @@ NativeIterator::NativeIterator(JSContext* cx,
MOZ_ASSERT(!*hadError);
}

inline size_t NativeIterator::allocationSize() const {
  // Recompute the size originally passed to the allocator from the guard
  // range and the initial property count.
  const size_t guardCount = guardsEnd() - guardsBegin();
  return AllocationSize(initialPropertyCount(), guardCount);
}

/* static */
bool IteratorHashPolicy::match(PropertyIteratorObject* obj,
const Lookup& lookup) {
Expand Down Expand Up @@ -1059,7 +1081,7 @@ void PropertyIteratorObject::trace(JSTracer* trc, JSObject* obj) {
void PropertyIteratorObject::finalize(FreeOp* fop, JSObject* obj) {
  // Release the native iterator, if one was ever attached, and unaccount the
  // memory that was associated with this object for it.
  NativeIterator* ni = obj->as<PropertyIteratorObject>().getNativeIterator();
  if (!ni) {
    return;
  }
  fop->free_(obj, ni, ni->allocationSize(), MemoryUse::NativeIterator);
}

Expand Down
Loading

0 comments on commit a78e233

Please sign in to comment.