analysis.calculus.mean_value → Mathlib.Analysis.Calculus.MeanValue

This file has been ported!

Changes since the initial port

The following section lists changes to this file in mathlib3 and mathlib4 that occurred after the initial port, with the most recent changes shown first.

Changes in mathlib3

(no changes in any commit since the initial port)

(last sync)

Changes in mathlib3port

Diff
@@ -4,10 +4,10 @@ Released under Apache 2.0 license as described in the file LICENSE.
 Authors: Sébastien Gouëzel, Yury Kudryashov
 -/
 import Analysis.Calculus.Deriv.Slope
-import Analysis.Calculus.LocalExtr
+import Analysis.Calculus.LocalExtr.Basic
 import Analysis.Convex.Slope
 import Analysis.Convex.Normed
-import Data.IsROrC.Basic
+import Analysis.RCLike.Basic
 import Topology.Instances.RealVectorSpace
 
 #align_import analysis.calculus.mean_value from "leanprover-community/mathlib"@"2ebc1d6c2fed9f54c95bbc3998eaa5570527129a"
@@ -500,7 +500,7 @@ also assume `[normed_space ℝ E]` to have a notion of a `convex` set. -/
 
 section
 
-variable {𝕜 G : Type _} [IsROrC 𝕜] [NormedSpace 𝕜 E] [NormedAddCommGroup G] [NormedSpace 𝕜 G]
+variable {𝕜 G : Type _} [RCLike 𝕜] [NormedSpace 𝕜 E] [NormedAddCommGroup G] [NormedSpace 𝕜 G]
 
 namespace Convex
 
@@ -529,7 +529,7 @@ theorem norm_image_sub_le_of_norm_hasFDerivWithin_le (hf : ∀ x ∈ s, HasFDeri
     apply hs.segment_subset xs ys
   have : f x = f (g 0) := by simp only [g]; rw [zero_smul, add_zero]
   rw [this]
-  have : f y = f (g 1) := by simp only [g]; rw [one_smul, add_sub_cancel'_right]
+  have : f y = f (g 1) := by simp only [g]; rw [one_smul, add_sub_cancel]
   rw [this]
   have D2 : ∀ t ∈ Icc (0 : ℝ) 1, HasDerivWithinAt (f ∘ g) (f' (g t) (y - x)) (Icc 0 1) t :=
     by
@@ -885,7 +885,7 @@ theorem exists_hasDerivAt_eq_slope : ∃ c ∈ Ioo a b, f' c = (f b - f a) / (b
     ⟨c, cmem, hc⟩
   use c, cmem
   simp only [_root_.id, Pi.one_apply, mul_one] at hc
-  rw [← hc, mul_div_cancel_left]
+  rw [← hc, mul_div_cancel_left₀]
   exact ne_of_gt (sub_pos.2 hab)
 #align exists_has_deriv_at_eq_slope exists_hasDerivAt_eq_slope
 -/
@@ -924,7 +924,7 @@ theorem exists_deriv_eq_slope : ∃ c ∈ Ioo a b, deriv f c = (f b - f a) / (b
 
 end Interval
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:641:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:642:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.mul_sub_lt_image_sub_of_lt_deriv /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C < f'`, then
@@ -956,7 +956,7 @@ theorem mul_sub_lt_image_sub_of_lt_deriv {f : ℝ → ℝ} (hf : Differentiable
 #align mul_sub_lt_image_sub_of_lt_deriv mul_sub_lt_image_sub_of_lt_deriv
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:641:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:642:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.mul_sub_le_image_sub_of_le_deriv /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C ≤ f'`, then
@@ -989,7 +989,7 @@ theorem mul_sub_le_image_sub_of_le_deriv {f : ℝ → ℝ} (hf : Differentiable
 #align mul_sub_le_image_sub_of_le_deriv mul_sub_le_image_sub_of_le_deriv
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:641:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:642:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.image_sub_lt_mul_sub_of_deriv_lt /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' < C`, then
@@ -1021,7 +1021,7 @@ theorem image_sub_lt_mul_sub_of_deriv_lt {f : ℝ → ℝ} (hf : Differentiable
 #align image_sub_lt_mul_sub_of_deriv_lt image_sub_lt_mul_sub_of_deriv_lt
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:641:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:642:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.image_sub_le_mul_sub_of_deriv_le /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' ≤ C`, then
@@ -1578,7 +1578,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
 #align domain_mvt domain_mvt
 -/
 
-section IsROrC
+section RCLike
 
 /-!
 ### Vector-valued functions `f : E → F`.  Strict differentiability.
@@ -1591,7 +1591,7 @@ balls over `ℝ` or `ℂ`. For now, we only include the ones that we need.
 -/
 
 
-variable {𝕜 : Type _} [IsROrC 𝕜] {G : Type _} [NormedAddCommGroup G] [NormedSpace 𝕜 G] {H : Type _}
+variable {𝕜 : Type _} [RCLike 𝕜] {G : Type _} [NormedAddCommGroup G] [NormedSpace 𝕜 G] {H : Type _}
   [NormedAddCommGroup H] [NormedSpace 𝕜 H] {f : G → H} {f' : G → G →L[𝕜] H} {x : G}
 
 #print hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt /-
@@ -1630,5 +1630,5 @@ theorem hasStrictDerivAt_of_hasDerivAt_of_continuousAt {f f' : 𝕜 → G} {x :
 #align has_strict_deriv_at_of_has_deriv_at_of_continuous_at hasStrictDerivAt_of_hasDerivAt_of_continuousAt
 -/
 
-end IsROrC
+end RCLike
 
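Several hunks in the diff above touch the Lagrange mean value lemmas. As a quick orientation for readers of the ported file, here is a minimal usage sketch of `exists_deriv_eq_slope`; the import path and the explicit-argument order are assumptions inferred from the statement shown above, not guaranteed by this page.

import Mathlib.Analysis.Calculus.MeanValue

open Set

-- Lagrange's mean value theorem, `deriv` version: restated and discharged by the library
-- lemma (argument order assumed from the statement shown in the diff above).
example (f : ℝ → ℝ) {a b : ℝ} (hab : a < b)
    (hfc : ContinuousOn f (Icc a b)) (hfd : DifferentiableOn ℝ f (Ioo a b)) :
    ∃ c ∈ Ioo a b, deriv f c = (f b - f a) / (b - a) :=
  exists_deriv_eq_slope f hab hfc hfd
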
Diff
@@ -125,7 +125,7 @@ theorem image_le_of_liminf_slope_right_lt_deriv_boundary' {f f' : ℝ → ℝ} {
     refine' ⟨z, _, hz⟩
     have := (hfz.trans hzB).le
     rwa [slope_def_field, slope_def_field, div_le_div_right (sub_pos.2 hz.1), hxB,
-      sub_le_sub_iff_right] at this 
+      sub_le_sub_iff_right] at this
 #align image_le_of_liminf_slope_right_lt_deriv_boundary' image_le_of_liminf_slope_right_lt_deriv_boundary'
 -/
 
@@ -570,7 +570,7 @@ theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt (hs
   by
   obtain ⟨ε, ε0, hε⟩ : ∃ ε > 0, ball x ε ∩ s ⊆ {y | HasFDerivWithinAt f (f' y) s y ∧ ‖f' y‖₊ < K}
   exact mem_nhds_within_iff.1 (hder.and <| hcont.nnnorm.eventually (gt_mem_nhds hK))
-  rw [inter_comm] at hε 
+  rw [inter_comm] at hε
   refine' ⟨s ∩ ball x ε, inter_mem_nhdsWithin _ (ball_mem_nhds _ ε0), _⟩
   exact
     (hs.inter (convex_ball _ _)).lipschitzOnWith_of_nnnorm_hasFDerivWithin_le
@@ -705,7 +705,7 @@ theorem eqOn_of_fderivWithin_eq (hs : Convex ℝ s) (hf : DifferentiableOn 𝕜
     (hf' : ∀ x ∈ s, fderivWithin 𝕜 f s x = fderivWithin 𝕜 g s x) (hx : x ∈ s) (hfgx : f x = g x) :
     s.EqOn f g := by
   intro y hy
-  suffices f x - g x = f y - g y by rwa [hfgx, sub_self, eq_comm, sub_eq_zero] at this 
+  suffices f x - g x = f y - g y by rwa [hfgx, sub_self, eq_comm, sub_eq_zero] at this
   refine' hs.is_const_of_fderiv_within_eq_zero (hf.sub hg) _ hx hy
   intro z hz
   rw [fderivWithin_sub (hs' _ hz) (hf _ hz) (hg _ hz), sub_eq_zero, hf' _ hz]
@@ -884,7 +884,7 @@ theorem exists_hasDerivAt_eq_slope : ∃ c ∈ Ioo a b, f' c = (f b - f a) / (b
       fun x hx => hasDerivAt_id x with
     ⟨c, cmem, hc⟩
   use c, cmem
-  simp only [_root_.id, Pi.one_apply, mul_one] at hc 
+  simp only [_root_.id, Pi.one_apply, mul_one] at hc
   rw [← hc, mul_div_cancel_left]
   exact ne_of_gt (sub_pos.2 hab)
 #align exists_has_deriv_at_eq_slope exists_hasDerivAt_eq_slope
@@ -1220,7 +1220,7 @@ theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : C
   by
   by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
   · apply StrictMonoOn.exists_slope_lt_deriv_aux hf hxy hf'_mono h
-  · push_neg at h 
+  · push_neg at h
     rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
     obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ (a : ℝ) (H : a ∈ Ioo x w), (f w - f x) / (w - x) < deriv f a :=
       by
@@ -1275,7 +1275,7 @@ theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : C
   by
   by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
   · apply StrictMonoOn.exists_deriv_lt_slope_aux hf hxy hf'_mono h
-  · push_neg at h 
+  · push_neg at h
     rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
     obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ (a : ℝ) (H : a ∈ Ioo x w), deriv f a < (f w - f x) / (w - x) :=
       by
@@ -1553,7 +1553,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
     intro t ht; exact ⟨t, ht, rfl⟩
   have hseg' : Icc 0 1 ⊆ g ⁻¹' s := by
     rw [← image_subset_iff]; unfold image; change ∀ _, _
-    intro z Hz; rw [mem_set_of_eq] at Hz ; rcases Hz with ⟨t, Ht, hgt⟩
+    intro z Hz; rw [mem_set_of_eq] at Hz; rcases Hz with ⟨t, Ht, hgt⟩
     rw [← hgt]; exact hs.segment_subset xs ys (hseg t Ht)
   -- derivative of pullback of f under parametrization
   have hfg :
@@ -1563,7 +1563,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
     have hg : HasDerivAt g (y - x) t :=
       by
       have := ((hasDerivAt_id t).smul_const (y - x)).const_add x
-      rwa [one_smul] at this 
+      rwa [one_smul] at this
     exact (hf (g t) <| hseg' Ht).comp_hasDerivWithinAt _ hg.has_deriv_within_at hseg'
   -- apply 1-variable mean value theorem to pullback
   have hMVT : ∃ t ∈ Ioo (0 : ℝ) 1, (f' (g t) : E → ℝ) (y - x) = (f (g 1) - f (g 0)) / (1 - 0) :=
@@ -1608,7 +1608,7 @@ theorem hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
   refine' ⟨ε, ε0, _⟩
   -- simplify formulas involving the product E × E
   rintro ⟨a, b⟩ h
-  rw [← ball_prod_same, prod_mk_mem_set_prod_eq] at h 
+  rw [← ball_prod_same, prod_mk_mem_set_prod_eq] at h
   -- exploit the choice of ε as the modulus of continuity of f'
   have hf' : ∀ x' ∈ ball x ε, ‖f' x' - f' x‖ ≤ c := by intro x' H'; rw [← dist_eq_norm];
     exact le_of_lt (hε H').2
Diff
@@ -1053,20 +1053,20 @@ theorem image_sub_le_mul_sub_of_deriv_le {f : ℝ → ℝ} (hf : Differentiable
 #align image_sub_le_mul_sub_of_deriv_le image_sub_le_mul_sub_of_deriv_le
 -/
 
-#print Convex.strictMonoOn_of_deriv_pos /-
+#print strictMonoOn_of_deriv_pos /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is positive, then
 `f` is a strictly monotone function on `D`.
 Note that we don't require differentiability explicitly as it already implied by the derivative
 being strictly positive. -/
-theorem Convex.strictMonoOn_of_deriv_pos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, 0 < deriv f x) : StrictMonoOn f D :=
+theorem strictMonoOn_of_deriv_pos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
+    (hf' : ∀ x ∈ interior D, 0 < deriv f x) : StrictMonoOn f D :=
   by
   rintro x hx y hy
   simpa only [MulZeroClass.zero_mul, sub_pos] using
     hD.mul_sub_lt_image_sub_of_lt_deriv hf _ hf' x hx y hy
   exact fun z hz => (differentiableAt_of_deriv_ne_zero (hf' z hz).ne').DifferentiableWithinAt
-#align convex.strict_mono_on_of_deriv_pos Convex.strictMonoOn_of_deriv_pos
+#align convex.strict_mono_on_of_deriv_pos strictMonoOn_of_deriv_pos
 -/
 
 #print strictMono_of_deriv_pos /-
@@ -1083,16 +1083,16 @@ theorem strictMono_of_deriv_pos {f : ℝ → ℝ} (hf' : ∀ x, 0 < deriv f x) :
 #align strict_mono_of_deriv_pos strictMono_of_deriv_pos
 -/
 
-#print Convex.monotoneOn_of_deriv_nonneg /-
+#print monotoneOn_of_deriv_nonneg /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonnegative, then
 `f` is a monotone function on `D`. -/
-theorem Convex.monotoneOn_of_deriv_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : DifferentiableOn ℝ f (interior D))
-    (hf'_nonneg : ∀ x ∈ interior D, 0 ≤ deriv f x) : MonotoneOn f D := fun x hx y hy hxy => by
+theorem monotoneOn_of_deriv_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
+    (hf' : DifferentiableOn ℝ f (interior D)) (hf'_nonneg : ∀ x ∈ interior D, 0 ≤ deriv f x) :
+    MonotoneOn f D := fun x hx y hy hxy => by
   simpa only [MulZeroClass.zero_mul, sub_nonneg] using
     hD.mul_sub_le_image_sub_of_le_deriv hf hf' hf'_nonneg x hx y hy hxy
-#align convex.monotone_on_of_deriv_nonneg Convex.monotoneOn_of_deriv_nonneg
+#align convex.monotone_on_of_deriv_nonneg monotoneOn_of_deriv_nonneg
 -/
 
 #print monotone_of_deriv_nonneg /-
@@ -1106,18 +1106,17 @@ theorem monotone_of_deriv_nonneg {f : ℝ → ℝ} (hf : Differentiable ℝ f) (
 #align monotone_of_deriv_nonneg monotone_of_deriv_nonneg
 -/
 
-#print Convex.strictAntiOn_of_deriv_neg /-
+#print strictAntiOn_of_deriv_neg /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is negative, then
 `f` is a strictly antitone function on `D`. -/
-theorem Convex.strictAntiOn_of_deriv_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, deriv f x < 0) : StrictAntiOn f D :=
-  fun x hx y => by
+theorem strictAntiOn_of_deriv_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
+    (hf' : ∀ x ∈ interior D, deriv f x < 0) : StrictAntiOn f D := fun x hx y => by
   simpa only [MulZeroClass.zero_mul, sub_lt_zero] using
     hD.image_sub_lt_mul_sub_of_deriv_lt hf
       (fun z hz => (differentiableAt_of_deriv_ne_zero (hf' z hz).Ne).DifferentiableWithinAt) hf' x
       hx y
-#align convex.strict_anti_on_of_deriv_neg Convex.strictAntiOn_of_deriv_neg
+#align convex.strict_anti_on_of_deriv_neg strictAntiOn_of_deriv_neg
 -/
 
 #print strictAnti_of_deriv_neg /-
@@ -1134,16 +1133,16 @@ theorem strictAnti_of_deriv_neg {f : ℝ → ℝ} (hf' : ∀ x, deriv f x < 0) :
 #align strict_anti_of_deriv_neg strictAnti_of_deriv_neg
 -/
 
-#print Convex.antitoneOn_of_deriv_nonpos /-
+#print antitoneOn_of_deriv_nonpos /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonpositive, then
 `f` is an antitone function on `D`. -/
-theorem Convex.antitoneOn_of_deriv_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : DifferentiableOn ℝ f (interior D))
-    (hf'_nonpos : ∀ x ∈ interior D, deriv f x ≤ 0) : AntitoneOn f D := fun x hx y hy hxy => by
+theorem antitoneOn_of_deriv_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
+    (hf' : DifferentiableOn ℝ f (interior D)) (hf'_nonpos : ∀ x ∈ interior D, deriv f x ≤ 0) :
+    AntitoneOn f D := fun x hx y hy hxy => by
   simpa only [MulZeroClass.zero_mul, sub_nonpos] using
     hD.image_sub_le_mul_sub_of_deriv_le hf hf' hf'_nonpos x hx y hy hxy
-#align convex.antitone_on_of_deriv_nonpos Convex.antitoneOn_of_deriv_nonpos
+#align convex.antitone_on_of_deriv_nonpos antitoneOn_of_deriv_nonpos
 -/
 
 #print antitone_of_deriv_nonpos /-
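
The diff above moves the derivative-sign monotonicity lemmas out of the `Convex` namespace. A minimal sketch of calling one of the unprefixed names, with the hypothesis shapes taken from the statement shown above (the import path is an assumption):

import Mathlib.Analysis.Calculus.MeanValue

-- A positive derivative on the interior of a convex set gives strict monotonicity on the
-- set; the lemma now lives in the root namespace rather than under `Convex`.
example {D : Set ℝ} {f : ℝ → ℝ} (hD : Convex ℝ D) (hf : ContinuousOn f D)
    (hf' : ∀ x ∈ interior D, 0 < deriv f x) : StrictMonoOn f D :=
  strictMonoOn_of_deriv_pos hD hf hf'
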
Diff
@@ -3,12 +3,12 @@ Copyright (c) 2019 Sébastien Gouëzel. All rights reserved.
 Released under Apache 2.0 license as described in the file LICENSE.
 Authors: Sébastien Gouëzel, Yury Kudryashov
 -/
-import Mathbin.Analysis.Calculus.Deriv.Slope
-import Mathbin.Analysis.Calculus.LocalExtr
-import Mathbin.Analysis.Convex.Slope
-import Mathbin.Analysis.Convex.Normed
-import Mathbin.Data.IsROrC.Basic
-import Mathbin.Topology.Instances.RealVectorSpace
+import Analysis.Calculus.Deriv.Slope
+import Analysis.Calculus.LocalExtr
+import Analysis.Convex.Slope
+import Analysis.Convex.Normed
+import Data.IsROrC.Basic
+import Topology.Instances.RealVectorSpace
 
 #align_import analysis.calculus.mean_value from "leanprover-community/mathlib"@"2ebc1d6c2fed9f54c95bbc3998eaa5570527129a"
 
@@ -924,7 +924,7 @@ theorem exists_deriv_eq_slope : ∃ c ∈ Ioo a b, deriv f c = (f b - f a) / (b
 
 end Interval
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:641:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.mul_sub_lt_image_sub_of_lt_deriv /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C < f'`, then
@@ -956,7 +956,7 @@ theorem mul_sub_lt_image_sub_of_lt_deriv {f : ℝ → ℝ} (hf : Differentiable
 #align mul_sub_lt_image_sub_of_lt_deriv mul_sub_lt_image_sub_of_lt_deriv
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:641:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.mul_sub_le_image_sub_of_le_deriv /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C ≤ f'`, then
@@ -989,7 +989,7 @@ theorem mul_sub_le_image_sub_of_le_deriv {f : ℝ → ℝ} (hf : Differentiable
 #align mul_sub_le_image_sub_of_le_deriv mul_sub_le_image_sub_of_le_deriv
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:641:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.image_sub_lt_mul_sub_of_deriv_lt /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' < C`, then
@@ -1021,7 +1021,7 @@ theorem image_sub_lt_mul_sub_of_deriv_lt {f : ℝ → ℝ} (hf : Differentiable
 #align image_sub_lt_mul_sub_of_deriv_lt image_sub_lt_mul_sub_of_deriv_lt
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:641:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.image_sub_le_mul_sub_of_deriv_le /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' ≤ C`, then
Diff
@@ -799,7 +799,7 @@ theorem lipschitzOnWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : ∀ x ∈ s, Diff
 then the function is `C`-Lipschitz.  Version with `deriv` and `lipschitz_with`. -/
 theorem lipschitzWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : Differentiable 𝕜 f)
     (bound : ∀ x, ‖deriv f x‖₊ ≤ C) : LipschitzWith C f :=
-  lipschitz_on_univ.1 <|
+  lipschitzOn_univ.1 <|
     convex_univ.lipschitzOnWith_of_nnnorm_deriv_le (fun x hx => hf x) fun x hx => bound x
 #align lipschitz_with_of_nnnorm_deriv_le lipschitzWith_of_nnnorm_deriv_le
 -/
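
The full statement of `lipschitzWith_of_nnnorm_deriv_le` appears in the hunk above. A minimal sketch of invoking it with `𝕜 = ℝ` (the import path is an assumption):

import Mathlib.Analysis.Calculus.MeanValue

open scoped NNReal

-- A globally differentiable function whose derivative is bounded in `nnnorm` by `C`
-- is `C`-Lipschitz; hypotheses follow the statement shown in the diff above.
example {f : ℝ → ℝ} {C : ℝ≥0} (hf : Differentiable ℝ f)
    (bound : ∀ x, ‖deriv f x‖₊ ≤ C) : LipschitzWith C f :=
  lipschitzWith_of_nnnorm_deriv_le hf bound
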
Diff
@@ -2,11 +2,6 @@
 Copyright (c) 2019 Sébastien Gouëzel. All rights reserved.
 Released under Apache 2.0 license as described in the file LICENSE.
 Authors: Sébastien Gouëzel, Yury Kudryashov
-
-! This file was ported from Lean 3 source module analysis.calculus.mean_value
-! leanprover-community/mathlib commit 2ebc1d6c2fed9f54c95bbc3998eaa5570527129a
-! Please do not edit these lines, except to modify the commit id
-! if you have ported upstream changes.
 -/
 import Mathbin.Analysis.Calculus.Deriv.Slope
 import Mathbin.Analysis.Calculus.LocalExtr
@@ -15,6 +10,8 @@ import Mathbin.Analysis.Convex.Normed
 import Mathbin.Data.IsROrC.Basic
 import Mathbin.Topology.Instances.RealVectorSpace
 
+#align_import analysis.calculus.mean_value from "leanprover-community/mathlib"@"2ebc1d6c2fed9f54c95bbc3998eaa5570527129a"
+
 /-!
 # The mean value inequality and equalities
 
@@ -927,7 +924,7 @@ theorem exists_deriv_eq_slope : ∃ c ∈ Ioo a b, deriv f c = (f b - f a) / (b
 
 end Interval
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.mul_sub_lt_image_sub_of_lt_deriv /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C < f'`, then
@@ -959,7 +956,7 @@ theorem mul_sub_lt_image_sub_of_lt_deriv {f : ℝ → ℝ} (hf : Differentiable
 #align mul_sub_lt_image_sub_of_lt_deriv mul_sub_lt_image_sub_of_lt_deriv
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.mul_sub_le_image_sub_of_le_deriv /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C ≤ f'`, then
@@ -992,7 +989,7 @@ theorem mul_sub_le_image_sub_of_le_deriv {f : ℝ → ℝ} (hf : Differentiable
 #align mul_sub_le_image_sub_of_le_deriv mul_sub_le_image_sub_of_le_deriv
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.image_sub_lt_mul_sub_of_deriv_lt /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' < C`, then
@@ -1024,7 +1021,7 @@ theorem image_sub_lt_mul_sub_of_deriv_lt {f : ℝ → ℝ} (hf : Differentiable
 #align image_sub_lt_mul_sub_of_deriv_lt image_sub_lt_mul_sub_of_deriv_lt
 -/
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 #print Convex.image_sub_le_mul_sub_of_deriv_le /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' ≤ C`, then
Diff
@@ -84,6 +84,7 @@ open scoped Classical Topology NNReal
 /-! ### One-dimensional fencing inequalities -/
 
 
+#print image_le_of_liminf_slope_right_lt_deriv_boundary' /-
 /-- General fencing theorem for continuous functions with an estimate on the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -129,7 +130,9 @@ theorem image_le_of_liminf_slope_right_lt_deriv_boundary' {f f' : ℝ → ℝ} {
     rwa [slope_def_field, slope_def_field, div_le_div_right (sub_pos.2 hz.1), hxB,
       sub_le_sub_iff_right] at this 
 #align image_le_of_liminf_slope_right_lt_deriv_boundary' image_le_of_liminf_slope_right_lt_deriv_boundary'
+-/
 
+#print image_le_of_liminf_slope_right_lt_deriv_boundary /-
 /-- General fencing theorem for continuous functions with an estimate on the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -149,7 +152,9 @@ theorem image_le_of_liminf_slope_right_lt_deriv_boundary {f f' : ℝ → ℝ} {a
   image_le_of_liminf_slope_right_lt_deriv_boundary' hf hf' ha
     (fun x hx => (hB x).ContinuousAt.ContinuousWithinAt) (fun x hx => (hB x).HasDerivWithinAt) bound
 #align image_le_of_liminf_slope_right_lt_deriv_boundary image_le_of_liminf_slope_right_lt_deriv_boundary
+-/
 
+#print image_le_of_liminf_slope_right_le_deriv_boundary /-
 /-- General fencing theorem for continuous functions with an estimate on the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -183,7 +188,9 @@ theorem image_le_of_liminf_slope_right_le_deriv_boundary {f : ℝ → ℝ} {a b
     continuous_within_at_const.add (continuous_within_at_id.mul continuousWithinAt_const)
   convert continuous_within_at_const.closure_le _ this (Hr x hx) <;> simp
 #align image_le_of_liminf_slope_right_le_deriv_boundary image_le_of_liminf_slope_right_le_deriv_boundary
+-/
 
+#print image_le_of_deriv_right_lt_deriv_boundary' /-
 /-- General fencing theorem for continuous functions with an estimate on the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -201,7 +208,9 @@ theorem image_le_of_deriv_right_lt_deriv_boundary' {f f' : ℝ → ℝ} {a b : 
   image_le_of_liminf_slope_right_lt_deriv_boundary' hf
     (fun x hx r hr => (hf' x hx).liminf_right_slope_le hr) ha hB hB' bound
 #align image_le_of_deriv_right_lt_deriv_boundary' image_le_of_deriv_right_lt_deriv_boundary'
+-/
 
+#print image_le_of_deriv_right_lt_deriv_boundary /-
 /-- General fencing theorem for continuous functions with an estimate on the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -218,7 +227,9 @@ theorem image_le_of_deriv_right_lt_deriv_boundary {f f' : ℝ → ℝ} {a b : 
   image_le_of_deriv_right_lt_deriv_boundary' hf hf' ha
     (fun x hx => (hB x).ContinuousAt.ContinuousWithinAt) (fun x hx => (hB x).HasDerivWithinAt) bound
 #align image_le_of_deriv_right_lt_deriv_boundary image_le_of_deriv_right_lt_deriv_boundary
+-/
 
+#print image_le_of_deriv_right_le_deriv_boundary /-
 /-- General fencing theorem for continuous functions with an estimate on the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -236,6 +247,7 @@ theorem image_le_of_deriv_right_le_deriv_boundary {f f' : ℝ → ℝ} {a b : 
   image_le_of_liminf_slope_right_le_deriv_boundary hf ha hB hB' fun x hx r hr =>
     (hf' x hx).liminf_right_slope_le (lt_of_le_of_lt (bound x hx) hr)
 #align image_le_of_deriv_right_le_deriv_boundary image_le_of_deriv_right_le_deriv_boundary
+-/
 
 /-! ### Vector-valued functions `f : ℝ → E` -/
 
@@ -244,6 +256,7 @@ section
 
 variable {f : ℝ → E} {a b : ℝ}
 
+#print image_norm_le_of_liminf_right_slope_norm_lt_deriv_boundary /-
 /-- General fencing theorem for continuous functions with an estimate on the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -264,7 +277,9 @@ theorem image_norm_le_of_liminf_right_slope_norm_lt_deriv_boundary {E : Type _}
   image_le_of_liminf_slope_right_lt_deriv_boundary' (continuous_norm.comp_continuousOn hf) hf' ha hB
     hB' bound
 #align image_norm_le_of_liminf_right_slope_norm_lt_deriv_boundary image_norm_le_of_liminf_right_slope_norm_lt_deriv_boundary
+-/
 
+#print image_norm_le_of_norm_deriv_right_lt_deriv_boundary' /-
 /-- General fencing theorem for continuous functions with an estimate on the norm of the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -283,7 +298,9 @@ theorem image_norm_le_of_norm_deriv_right_lt_deriv_boundary' {f' : ℝ → E}
   image_norm_le_of_liminf_right_slope_norm_lt_deriv_boundary hf
     (fun x hx r hr => (hf' x hx).liminf_right_slope_norm_le hr) ha hB hB' bound
 #align image_norm_le_of_norm_deriv_right_lt_deriv_boundary' image_norm_le_of_norm_deriv_right_lt_deriv_boundary'
+-/
 
+#print image_norm_le_of_norm_deriv_right_lt_deriv_boundary /-
 /-- General fencing theorem for continuous functions with an estimate on the norm of the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -302,7 +319,9 @@ theorem image_norm_le_of_norm_deriv_right_lt_deriv_boundary {f' : ℝ → E}
   image_norm_le_of_norm_deriv_right_lt_deriv_boundary' hf hf' ha
     (fun x hx => (hB x).ContinuousAt.ContinuousWithinAt) (fun x hx => (hB x).HasDerivWithinAt) bound
 #align image_norm_le_of_norm_deriv_right_lt_deriv_boundary image_norm_le_of_norm_deriv_right_lt_deriv_boundary
+-/
 
+#print image_norm_le_of_norm_deriv_right_le_deriv_boundary' /-
 /-- General fencing theorem for continuous functions with an estimate on the norm of the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -321,7 +340,9 @@ theorem image_norm_le_of_norm_deriv_right_le_deriv_boundary' {f' : ℝ → E}
   image_le_of_liminf_slope_right_le_deriv_boundary (continuous_norm.comp_continuousOn hf) ha hB hB'
     fun x hx r hr => (hf' x hx).liminf_right_slope_norm_le (lt_of_le_of_lt (bound x hx) hr)
 #align image_norm_le_of_norm_deriv_right_le_deriv_boundary' image_norm_le_of_norm_deriv_right_le_deriv_boundary'
+-/
 
+#print image_norm_le_of_norm_deriv_right_le_deriv_boundary /-
 /-- General fencing theorem for continuous functions with an estimate on the norm of the derivative.
 Let `f` and `B` be continuous functions on `[a, b]` such that
 
@@ -340,7 +361,9 @@ theorem image_norm_le_of_norm_deriv_right_le_deriv_boundary {f' : ℝ → E}
   image_norm_le_of_norm_deriv_right_le_deriv_boundary' hf hf' ha
     (fun x hx => (hB x).ContinuousAt.ContinuousWithinAt) (fun x hx => (hB x).HasDerivWithinAt) bound
 #align image_norm_le_of_norm_deriv_right_le_deriv_boundary image_norm_le_of_norm_deriv_right_le_deriv_boundary
+-/
 
+#print norm_image_sub_le_of_norm_deriv_right_le_segment /-
 /-- A function on `[a, b]` with the norm of the right derivative bounded by `C`
 satisfies `‖f x - f a‖ ≤ C * (x - a)`. -/
 theorem norm_image_sub_le_of_norm_deriv_right_le_segment {f' : ℝ → E} {C : ℝ}
@@ -360,7 +383,9 @@ theorem norm_image_sub_le_of_norm_deriv_right_le_segment {f' : ℝ → E} {C : 
   convert image_norm_le_of_norm_deriv_right_le_deriv_boundary hg hg' _ hB bound
   simp only [g, B]; rw [sub_self, norm_zero, sub_self, MulZeroClass.mul_zero]
 #align norm_image_sub_le_of_norm_deriv_right_le_segment norm_image_sub_le_of_norm_deriv_right_le_segment
+-/
 
+#print norm_image_sub_le_of_norm_deriv_le_segment' /-
 /-- A function on `[a, b]` with the norm of the derivative within `[a, b]`
 bounded by `C` satisfies `‖f x - f a‖ ≤ C * (x - a)`, `has_deriv_within_at`
 version. -/
@@ -373,7 +398,9 @@ theorem norm_image_sub_le_of_norm_deriv_le_segment' {f' : ℝ → E} {C : ℝ}
       (fun x hx => _) bound
   exact (hf x <| Ico_subset_Icc_self hx).nhdsWithin (Icc_mem_nhdsWithin_Ici hx)
 #align norm_image_sub_le_of_norm_deriv_le_segment' norm_image_sub_le_of_norm_deriv_le_segment'
+-/
 
+#print norm_image_sub_le_of_norm_deriv_le_segment /-
 /-- A function on `[a, b]` with the norm of the derivative within `[a, b]`
 bounded by `C` satisfies `‖f x - f a‖ ≤ C * (x - a)`, `deriv_within`
 version. -/
@@ -384,7 +411,9 @@ theorem norm_image_sub_le_of_norm_deriv_le_segment {C : ℝ} (hf : Differentiabl
   refine' norm_image_sub_le_of_norm_deriv_le_segment' _ bound
   exact fun x hx => (hf x hx).HasDerivWithinAt
 #align norm_image_sub_le_of_norm_deriv_le_segment norm_image_sub_le_of_norm_deriv_le_segment
+-/
 
+#print norm_image_sub_le_of_norm_deriv_le_segment_01' /-
 /-- A function on `[0, 1]` with the norm of the derivative within `[0, 1]`
 bounded by `C` satisfies `‖f 1 - f 0‖ ≤ C`, `has_deriv_within_at`
 version. -/
@@ -394,7 +423,9 @@ theorem norm_image_sub_le_of_norm_deriv_le_segment_01' {f' : ℝ → E} {C : ℝ
   simpa only [sub_zero, mul_one] using
     norm_image_sub_le_of_norm_deriv_le_segment' hf bound 1 (right_mem_Icc.2 zero_le_one)
 #align norm_image_sub_le_of_norm_deriv_le_segment_01' norm_image_sub_le_of_norm_deriv_le_segment_01'
+-/
 
+#print norm_image_sub_le_of_norm_deriv_le_segment_01 /-
 /-- A function on `[0, 1]` with the norm of the derivative within `[0, 1]`
 bounded by `C` satisfies `‖f 1 - f 0‖ ≤ C`, `deriv_within` version. -/
 theorem norm_image_sub_le_of_norm_deriv_le_segment_01 {C : ℝ}
@@ -403,14 +434,18 @@ theorem norm_image_sub_le_of_norm_deriv_le_segment_01 {C : ℝ}
   simpa only [sub_zero, mul_one] using
     norm_image_sub_le_of_norm_deriv_le_segment hf bound 1 (right_mem_Icc.2 zero_le_one)
 #align norm_image_sub_le_of_norm_deriv_le_segment_01 norm_image_sub_le_of_norm_deriv_le_segment_01
+-/
 
+#print constant_of_has_deriv_right_zero /-
 theorem constant_of_has_deriv_right_zero (hcont : ContinuousOn f (Icc a b))
     (hderiv : ∀ x ∈ Ico a b, HasDerivWithinAt f 0 (Ici x) x) : ∀ x ∈ Icc a b, f x = f a := by
   simpa only [MulZeroClass.zero_mul, norm_le_zero_iff, sub_eq_zero] using fun x hx =>
     norm_image_sub_le_of_norm_deriv_right_le_segment hcont hderiv
       (fun y hy => by rw [norm_le_zero_iff]) x hx
 #align constant_of_has_deriv_right_zero constant_of_has_deriv_right_zero
+-/
 
+#print constant_of_derivWithin_zero /-
 theorem constant_of_derivWithin_zero (hdiff : DifferentiableOn ℝ f (Icc a b))
     (hderiv : ∀ x ∈ Ico a b, derivWithin f (Icc a b) x = 0) : ∀ x ∈ Icc a b, f x = f a :=
   by
@@ -419,6 +454,7 @@ theorem constant_of_derivWithin_zero (hdiff : DifferentiableOn ℝ f (Icc a b))
   simpa only [MulZeroClass.zero_mul, norm_le_zero_iff, sub_eq_zero] using fun x hx =>
     norm_image_sub_le_of_norm_deriv_le_segment hdiff H x hx
 #align constant_of_deriv_within_zero constant_of_derivWithin_zero
+-/
 
 variable {f' g : ℝ → E}
 
@@ -473,6 +509,7 @@ namespace Convex
 
 variable {f g : E → G} {C : ℝ} {s : Set E} {x y : E} {f' g' : E → E →L[𝕜] G} {φ : E →L[𝕜] G}
 
+#print Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le /-
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C`, then
 the function is `C`-Lipschitz. Version with `has_fderiv_within`. -/
 theorem norm_image_sub_le_of_norm_hasFDerivWithin_le (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x)
@@ -506,7 +543,9 @@ theorem norm_image_sub_le_of_norm_hasFDerivWithin_le (hf : ∀ x ∈ s, HasFDeri
   refine' fun t ht => le_of_op_norm_le _ _ _
   exact bound (g t) (segm <| Ico_subset_Icc_self ht)
 #align convex.norm_image_sub_le_of_norm_has_fderiv_within_le Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le
+-/
 
+#print Convex.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le /-
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C` on
 `s`, then the function is `C`-Lipschitz on `s`. Version with `has_fderiv_within` and
 `lipschitz_on_with`. -/
@@ -518,7 +557,9 @@ theorem lipschitzOnWith_of_nnnorm_hasFDerivWithin_le {C : ℝ≥0}
   intro x x_in y y_in
   exact hs.norm_image_sub_le_of_norm_has_fderiv_within_le hf bound y_in x_in
 #align convex.lipschitz_on_with_of_nnnorm_has_fderiv_within_le Convex.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le
+-/
 
+#print Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt /-
 /-- Let `s` be a convex set in a real normed vector space `E`, let `f : E → G` be a function
 differentiable within `s` in a neighborhood of `x : E` with derivative `f'`. Suppose that `f'` is
 continuous within `s` at `x`. Then for any number `K : ℝ≥0` larger than `‖f' x‖₊`, `f` is
@@ -538,7 +579,9 @@ theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt (hs
     (hs.inter (convex_ball _ _)).lipschitzOnWith_of_nnnorm_hasFDerivWithin_le
       (fun y hy => (hε hy).1.mono (inter_subset_left _ _)) fun y hy => (hε hy).2.le
 #align convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at_of_nnnorm_lt Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt
+-/
 
+#print Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt /-
 /-- Let `s` be a convex set in a real normed vector space `E`, let `f : E → G` be a function
 differentiable within `s` in a neighborhood of `x : E` with derivative `f'`. Suppose that `f'` is
 continuous within `s` at `x`. Then for any number `K : ℝ≥0` larger than `‖f' x‖₊`, `f` is Lipschitz
@@ -551,7 +594,9 @@ theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt (hs : Convex ℝ
   (exists_gt _).imp <|
     hs.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt hder hcont
 #align convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt
+-/
 
+#print Convex.norm_image_sub_le_of_norm_fderivWithin_le /-
 /-- The mean value theorem on a convex set: if the derivative of a function within this set is
 bounded by `C`, then the function is `C`-Lipschitz. Version with `fderiv_within`. -/
 theorem norm_image_sub_le_of_norm_fderivWithin_le (hf : DifferentiableOn 𝕜 f s)
@@ -560,7 +605,9 @@ theorem norm_image_sub_le_of_norm_fderivWithin_le (hf : DifferentiableOn 𝕜 f
   hs.norm_image_sub_le_of_norm_hasFDerivWithin_le (fun x hx => (hf x hx).HasFDerivWithinAt) bound xs
     ys
 #align convex.norm_image_sub_le_of_norm_fderiv_within_le Convex.norm_image_sub_le_of_norm_fderivWithin_le
+-/
 
+#print Convex.lipschitzOnWith_of_nnnorm_fderivWithin_le /-
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C` on
 `s`, then the function is `C`-Lipschitz on `s`. Version with `fderiv_within` and
 `lipschitz_on_with`. -/
@@ -568,7 +615,9 @@ theorem lipschitzOnWith_of_nnnorm_fderivWithin_le {C : ℝ≥0} (hf : Differenti
     (bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x‖₊ ≤ C) (hs : Convex ℝ s) : LipschitzOnWith C f s :=
   hs.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le (fun x hx => (hf x hx).HasFDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_fderiv_within_le Convex.lipschitzOnWith_of_nnnorm_fderivWithin_le
+-/
 
+#print Convex.norm_image_sub_le_of_norm_fderiv_le /-
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C`,
 then the function is `C`-Lipschitz. Version with `fderiv`. -/
 theorem norm_image_sub_le_of_norm_fderiv_le (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
@@ -577,7 +626,9 @@ theorem norm_image_sub_le_of_norm_fderiv_le (hf : ∀ x ∈ s, DifferentiableAt
   hs.norm_image_sub_le_of_norm_hasFDerivWithin_le
     (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_le Convex.norm_image_sub_le_of_norm_fderiv_le
+-/
 
+#print Convex.lipschitzOnWith_of_nnnorm_fderiv_le /-
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C` on
 `s`, then the function is `C`-Lipschitz on `s`. Version with `fderiv` and `lipschitz_on_with`. -/
 theorem lipschitzOnWith_of_nnnorm_fderiv_le {C : ℝ≥0} (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
@@ -585,7 +636,9 @@ theorem lipschitzOnWith_of_nnnorm_fderiv_le {C : ℝ≥0} (hf : ∀ x ∈ s, Dif
   hs.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le
     (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_fderiv_le Convex.lipschitzOnWith_of_nnnorm_fderiv_le
+-/
 
+#print Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le' /-
 /-- Variant of the mean value inequality on a convex set, using a bound on the difference between
 the derivative and a fixed linear map, rather than a bound on the derivative itself. Version with
 `has_fderiv_within`. -/
@@ -605,7 +658,9 @@ theorem norm_image_sub_le_of_norm_hasFDerivWithin_le' (hf : ∀ x ∈ s, HasFDer
     _ = ‖g y - g x‖ := by simp
     _ ≤ C * ‖y - x‖ := Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le hg bound hs xs ys
 #align convex.norm_image_sub_le_of_norm_has_fderiv_within_le' Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le'
+-/
 
+#print Convex.norm_image_sub_le_of_norm_fderivWithin_le' /-
 /-- Variant of the mean value inequality on a convex set. Version with `fderiv_within`. -/
 theorem norm_image_sub_le_of_norm_fderivWithin_le' (hf : DifferentiableOn 𝕜 f s)
     (bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x - φ‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
@@ -613,7 +668,9 @@ theorem norm_image_sub_le_of_norm_fderivWithin_le' (hf : DifferentiableOn 𝕜 f
   hs.norm_image_sub_le_of_norm_hasFDerivWithin_le' (fun x hx => (hf x hx).HasFDerivWithinAt) bound
     xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_within_le' Convex.norm_image_sub_le_of_norm_fderivWithin_le'
+-/
 
+#print Convex.norm_image_sub_le_of_norm_fderiv_le' /-
 /-- Variant of the mean value inequality on a convex set. Version with `fderiv`. -/
 theorem norm_image_sub_le_of_norm_fderiv_le' (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
     (bound : ∀ x ∈ s, ‖fderiv 𝕜 f x - φ‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
@@ -621,7 +678,9 @@ theorem norm_image_sub_le_of_norm_fderiv_le' (hf : ∀ x ∈ s, DifferentiableAt
   hs.norm_image_sub_le_of_norm_hasFDerivWithin_le'
     (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_le' Convex.norm_image_sub_le_of_norm_fderiv_le'
+-/
 
+#print Convex.is_const_of_fderivWithin_eq_zero /-
 /-- If a function has zero Fréchet derivative at every point of a convex set,
 then it is a constant on this set. -/
 theorem is_const_of_fderivWithin_eq_zero (hs : Convex ℝ s) (hf : DifferentiableOn 𝕜 f s)
@@ -631,13 +690,17 @@ theorem is_const_of_fderivWithin_eq_zero (hs : Convex ℝ s) (hf : Differentiabl
   simpa only [(dist_eq_norm _ _).symm, MulZeroClass.zero_mul, dist_le_zero, eq_comm] using
     hs.norm_image_sub_le_of_norm_fderiv_within_le hf bound hx hy
 #align convex.is_const_of_fderiv_within_eq_zero Convex.is_const_of_fderivWithin_eq_zero
+-/
 
+#print is_const_of_fderiv_eq_zero /-
 theorem is_const_of_fderiv_eq_zero (hf : Differentiable 𝕜 f) (hf' : ∀ x, fderiv 𝕜 f x = 0)
     (x y : E) : f x = f y :=
   convex_univ.is_const_of_fderivWithin_eq_zero hf.DifferentiableOn
     (fun x _ => by rw [fderivWithin_univ] <;> exact hf' x) trivial trivial
 #align is_const_of_fderiv_eq_zero is_const_of_fderiv_eq_zero
+-/
 
+#print Convex.eqOn_of_fderivWithin_eq /-
 /-- If two functions have equal Fréchet derivatives at every point of a convex set, and are equal at
 one point in that set, then they are equal on that set. -/
 theorem eqOn_of_fderivWithin_eq (hs : Convex ℝ s) (hf : DifferentiableOn 𝕜 f s)
@@ -650,13 +713,16 @@ theorem eqOn_of_fderivWithin_eq (hs : Convex ℝ s) (hf : DifferentiableOn 𝕜
   intro z hz
   rw [fderivWithin_sub (hs' _ hz) (hf _ hz) (hg _ hz), sub_eq_zero, hf' _ hz]
 #align convex.eq_on_of_fderiv_within_eq Convex.eqOn_of_fderivWithin_eq
+-/
 
+#print eq_of_fderiv_eq /-
 theorem eq_of_fderiv_eq (hf : Differentiable 𝕜 f) (hg : Differentiable 𝕜 g)
     (hf' : ∀ x, fderiv 𝕜 f x = fderiv 𝕜 g x) (x : E) (hfgx : f x = g x) : f = g :=
   suffices Set.univ.EqOn f g from funext fun x => this <| mem_univ x
   convex_univ.eqOn_of_fderivWithin_eq hf.DifferentiableOn hg.DifferentiableOn uniqueDiffOn_univ
     (fun x hx => by simpa using hf' _) (mem_univ _) hfgx
 #align eq_of_fderiv_eq eq_of_fderiv_eq
+-/
 
 end Convex
 
@@ -664,6 +730,7 @@ namespace Convex
 
 variable {f f' : 𝕜 → G} {s : Set 𝕜} {x y : 𝕜}
 
+#print Convex.norm_image_sub_le_of_norm_hasDerivWithin_le /-
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
 bounded by `C`, then the function is `C`-Lipschitz. Version with `has_deriv_within`. -/
 theorem norm_image_sub_le_of_norm_hasDerivWithin_le {C : ℝ}
@@ -672,7 +739,9 @@ theorem norm_image_sub_le_of_norm_hasDerivWithin_le {C : ℝ}
   Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le (fun x hx => (hf x hx).HasFDerivWithinAt)
     (fun x hx => le_trans (by simp) (bound x hx)) hs xs ys
 #align convex.norm_image_sub_le_of_norm_has_deriv_within_le Convex.norm_image_sub_le_of_norm_hasDerivWithin_le
+-/
 
+#print Convex.lipschitzOnWith_of_nnnorm_hasDerivWithin_le /-
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
 bounded by `C` on `s`, then the function is `C`-Lipschitz on `s`.
 Version with `has_deriv_within` and `lipschitz_on_with`. -/
@@ -682,7 +751,9 @@ theorem lipschitzOnWith_of_nnnorm_hasDerivWithin_le {C : ℝ≥0} (hs : Convex 
   Convex.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le (fun x hx => (hf x hx).HasFDerivWithinAt)
     (fun x hx => le_trans (by simp) (bound x hx)) hs
 #align convex.lipschitz_on_with_of_nnnorm_has_deriv_within_le Convex.lipschitzOnWith_of_nnnorm_hasDerivWithin_le
+-/
 
+#print Convex.norm_image_sub_le_of_norm_derivWithin_le /-
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function within
 this set is bounded by `C`, then the function is `C`-Lipschitz. Version with `deriv_within` -/
 theorem norm_image_sub_le_of_norm_derivWithin_le {C : ℝ} (hf : DifferentiableOn 𝕜 f s)
@@ -691,7 +762,9 @@ theorem norm_image_sub_le_of_norm_derivWithin_le {C : ℝ} (hf : DifferentiableO
   hs.norm_image_sub_le_of_norm_hasDerivWithin_le (fun x hx => (hf x hx).HasDerivWithinAt) bound xs
     ys
 #align convex.norm_image_sub_le_of_norm_deriv_within_le Convex.norm_image_sub_le_of_norm_derivWithin_le
+-/
 
+#print Convex.lipschitzOnWith_of_nnnorm_derivWithin_le /-
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
 bounded by `C` on `s`, then the function is `C`-Lipschitz on `s`.
 Version with `deriv_within` and `lipschitz_on_with`. -/
@@ -700,7 +773,9 @@ theorem lipschitzOnWith_of_nnnorm_derivWithin_le {C : ℝ≥0} (hs : Convex ℝ
     LipschitzOnWith C f s :=
   hs.lipschitzOnWith_of_nnnorm_hasDerivWithin_le (fun x hx => (hf x hx).HasDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_deriv_within_le Convex.lipschitzOnWith_of_nnnorm_derivWithin_le
+-/
 
+#print Convex.norm_image_sub_le_of_norm_deriv_le /-
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
 bounded by `C`, then the function is `C`-Lipschitz. Version with `deriv`. -/
 theorem norm_image_sub_le_of_norm_deriv_le {C : ℝ} (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
@@ -709,7 +784,9 @@ theorem norm_image_sub_le_of_norm_deriv_le {C : ℝ} (hf : ∀ x ∈ s, Differen
   hs.norm_image_sub_le_of_norm_hasDerivWithin_le (fun x hx => (hf x hx).HasDerivAt.HasDerivWithinAt)
     bound xs ys
 #align convex.norm_image_sub_le_of_norm_deriv_le Convex.norm_image_sub_le_of_norm_deriv_le
+-/
 
+#print Convex.lipschitzOnWith_of_nnnorm_deriv_le /-
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
 bounded by `C` on `s`, then the function is `C`-Lipschitz on `s`.
 Version with `deriv` and `lipschitz_on_with`. -/
@@ -718,7 +795,9 @@ theorem lipschitzOnWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : ∀ x ∈ s, Diff
   hs.lipschitzOnWith_of_nnnorm_hasDerivWithin_le (fun x hx => (hf x hx).HasDerivAt.HasDerivWithinAt)
     bound
 #align convex.lipschitz_on_with_of_nnnorm_deriv_le Convex.lipschitzOnWith_of_nnnorm_deriv_le
+-/
 
+#print lipschitzWith_of_nnnorm_deriv_le /-
 /-- The mean value theorem set in dimension 1: if the derivative of a function is bounded by `C`,
 then the function is `C`-Lipschitz.  Version with `deriv` and `lipschitz_with`. -/
 theorem lipschitzWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : Differentiable 𝕜 f)
@@ -726,13 +805,16 @@ theorem lipschitzWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : Differentiable 𝕜
   lipschitz_on_univ.1 <|
     convex_univ.lipschitzOnWith_of_nnnorm_deriv_le (fun x hx => hf x) fun x hx => bound x
 #align lipschitz_with_of_nnnorm_deriv_le lipschitzWith_of_nnnorm_deriv_le
+-/
 
+#print is_const_of_deriv_eq_zero /-
 /-- If `f : 𝕜 → G`, `𝕜 = R` or `𝕜 = ℂ`, is differentiable everywhere and its derivative equal zero,
 then it is a constant function. -/
 theorem is_const_of_deriv_eq_zero (hf : Differentiable 𝕜 f) (hf' : ∀ x, deriv f x = 0) (x y : 𝕜) :
     f x = f y :=
   is_const_of_fderiv_eq_zero hf (fun z => by ext; simp [← deriv_fderiv, hf']) _ _
 #align is_const_of_deriv_eq_zero is_const_of_deriv_eq_zero
+-/
 
 end Convex
 
@@ -749,8 +831,7 @@ variable (f f' : ℝ → ℝ) {a b : ℝ} (hab : a < b) (hfc : ContinuousOn f (I
   (g g' : ℝ → ℝ) (hgc : ContinuousOn g (Icc a b)) (hgg' : ∀ x ∈ Ioo a b, HasDerivAt g (g' x) x)
   (hgd : DifferentiableOn ℝ g (Ioo a b))
 
-include hab hfc hff' hgc hgg'
-
+#print exists_ratio_hasDerivAt_eq_ratio_slope /-
 /-- Cauchy's **Mean Value Theorem**, `has_deriv_at` version. -/
 theorem exists_ratio_hasDerivAt_eq_ratio_slope :
     ∃ c ∈ Ioo a b, (g b - g a) * f' c = (f b - f a) * g' c :=
@@ -765,9 +846,9 @@ theorem exists_ratio_hasDerivAt_eq_ratio_slope :
   rcases exists_hasDerivAt_eq_zero h h' hab hhc hI hhh' with ⟨c, cmem, hc⟩
   exact ⟨c, cmem, sub_eq_zero.1 hc⟩
 #align exists_ratio_has_deriv_at_eq_ratio_slope exists_ratio_hasDerivAt_eq_ratio_slope
+-/
 
-omit hfc hgc
-
+#print exists_ratio_hasDerivAt_eq_ratio_slope' /-
 /-- Cauchy's **Mean Value Theorem**, extended `has_deriv_at` version. -/
 theorem exists_ratio_hasDerivAt_eq_ratio_slope' {lfa lga lfb lgb : ℝ}
     (hff' : ∀ x ∈ Ioo a b, HasDerivAt f (f' x) x) (hgg' : ∀ x ∈ Ioo a b, HasDerivAt g (g' x) x)
@@ -796,11 +877,9 @@ theorem exists_ratio_hasDerivAt_eq_ratio_slope' {lfa lga lfb lgb : ℝ}
   rcases exists_hasDerivAt_eq_zero' hab hha hhb hhh' with ⟨c, cmem, hc⟩
   exact ⟨c, cmem, sub_eq_zero.1 hc⟩
 #align exists_ratio_has_deriv_at_eq_ratio_slope' exists_ratio_hasDerivAt_eq_ratio_slope'
+-/
 
-include hfc
-
-omit hgg'
-
+#print exists_hasDerivAt_eq_slope /-
 /-- Lagrange's Mean Value Theorem, `has_deriv_at` version -/
 theorem exists_hasDerivAt_eq_slope : ∃ c ∈ Ioo a b, f' c = (f b - f a) / (b - a) :=
   by
@@ -812,9 +891,9 @@ theorem exists_hasDerivAt_eq_slope : ∃ c ∈ Ioo a b, f' c = (f b - f a) / (b
   rw [← hc, mul_div_cancel_left]
   exact ne_of_gt (sub_pos.2 hab)
 #align exists_has_deriv_at_eq_slope exists_hasDerivAt_eq_slope
+-/
 
-omit hff'
-
+#print exists_ratio_deriv_eq_ratio_slope /-
 /-- Cauchy's Mean Value Theorem, `deriv` version. -/
 theorem exists_ratio_deriv_eq_ratio_slope :
     ∃ c ∈ Ioo a b, (g b - g a) * deriv f c = (f b - f a) * deriv g c :=
@@ -823,9 +902,9 @@ theorem exists_ratio_deriv_eq_ratio_slope :
     (deriv g) hgc fun x hx =>
     ((hgd x hx).DifferentiableAt <| IsOpen.mem_nhds isOpen_Ioo hx).HasDerivAt
 #align exists_ratio_deriv_eq_ratio_slope exists_ratio_deriv_eq_ratio_slope
+-/
 
-omit hfc
-
+#print exists_ratio_deriv_eq_ratio_slope' /-
 /-- Cauchy's Mean Value Theorem, extended `deriv` version. -/
 theorem exists_ratio_deriv_eq_ratio_slope' {lfa lga lfb lgb : ℝ}
     (hdf : DifferentiableOn ℝ f <| Ioo a b) (hdg : DifferentiableOn ℝ g <| Ioo a b)
@@ -836,16 +915,20 @@ theorem exists_ratio_deriv_eq_ratio_slope' {lfa lga lfb lgb : ℝ}
     (fun x hx => ((hdf x hx).DifferentiableAt <| Ioo_mem_nhds hx.1 hx.2).HasDerivAt)
     (fun x hx => ((hdg x hx).DifferentiableAt <| Ioo_mem_nhds hx.1 hx.2).HasDerivAt) hfa hga hfb hgb
 #align exists_ratio_deriv_eq_ratio_slope' exists_ratio_deriv_eq_ratio_slope'
+-/
 
+#print exists_deriv_eq_slope /-
 /-- Lagrange's **Mean Value Theorem**, `deriv` version. -/
 theorem exists_deriv_eq_slope : ∃ c ∈ Ioo a b, deriv f c = (f b - f a) / (b - a) :=
   exists_hasDerivAt_eq_slope f (deriv f) hab hfc fun x hx =>
     ((hfd x hx).DifferentiableAt <| IsOpen.mem_nhds isOpen_Ioo hx).HasDerivAt
 #align exists_deriv_eq_slope exists_deriv_eq_slope
+-/
 
 end Interval
 
 /- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+#print Convex.mul_sub_lt_image_sub_of_lt_deriv /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C < f'`, then
 `f` grows faster than `C * x` on `D`, i.e., `C * (y - x) < f y - f x` whenever `x, y ∈ D`,
@@ -864,7 +947,9 @@ theorem Convex.mul_sub_lt_image_sub_of_lt_deriv {D : Set ℝ} (hD : Convex ℝ D
   have : C < (f y - f x) / (y - x) := by rw [← ha]; exact hf'_gt _ (hxyD' a_mem)
   exact (lt_div_iff (sub_pos.2 hxy)).1 this
 #align convex.mul_sub_lt_image_sub_of_lt_deriv Convex.mul_sub_lt_image_sub_of_lt_deriv
+-/
 
+#print mul_sub_lt_image_sub_of_lt_deriv /-
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `C < f'`, then `f` grows faster than
 `C * x`, i.e., `C * (y - x) < f y - f x` whenever `x < y`. -/
 theorem mul_sub_lt_image_sub_of_lt_deriv {f : ℝ → ℝ} (hf : Differentiable ℝ f) {C}
@@ -872,8 +957,10 @@ theorem mul_sub_lt_image_sub_of_lt_deriv {f : ℝ → ℝ} (hf : Differentiable
   convex_univ.mul_sub_lt_image_sub_of_lt_deriv hf.Continuous.ContinuousOn hf.DifferentiableOn
     (fun x _ => hf'_gt x) x trivial y trivial hxy
 #align mul_sub_lt_image_sub_of_lt_deriv mul_sub_lt_image_sub_of_lt_deriv
+-/
 
 /- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+#print Convex.mul_sub_le_image_sub_of_le_deriv /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C ≤ f'`, then
 `f` grows at least as fast as `C * x` on `D`, i.e., `C * (y - x) ≤ f y - f x` whenever `x, y ∈ D`,
@@ -893,7 +980,9 @@ theorem Convex.mul_sub_le_image_sub_of_le_deriv {D : Set ℝ} (hD : Convex ℝ D
   have : C ≤ (f y - f x) / (y - x) := by rw [← ha]; exact hf'_ge _ (hxyD' a_mem)
   exact (le_div_iff (sub_pos.2 hxy')).1 this
 #align convex.mul_sub_le_image_sub_of_le_deriv Convex.mul_sub_le_image_sub_of_le_deriv
+-/
 
+#print mul_sub_le_image_sub_of_le_deriv /-
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `C ≤ f'`, then `f` grows at least as fast
 as `C * x`, i.e., `C * (y - x) ≤ f y - f x` whenever `x ≤ y`. -/
 theorem mul_sub_le_image_sub_of_le_deriv {f : ℝ → ℝ} (hf : Differentiable ℝ f) {C}
@@ -901,8 +990,10 @@ theorem mul_sub_le_image_sub_of_le_deriv {f : ℝ → ℝ} (hf : Differentiable
   convex_univ.mul_sub_le_image_sub_of_le_deriv hf.Continuous.ContinuousOn hf.DifferentiableOn
     (fun x _ => hf'_ge x) x trivial y trivial hxy
 #align mul_sub_le_image_sub_of_le_deriv mul_sub_le_image_sub_of_le_deriv
+-/
 
 /- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+#print Convex.image_sub_lt_mul_sub_of_deriv_lt /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' < C`, then
 `f` grows slower than `C * x` on `D`, i.e., `f y - f x < C * (y - x)` whenever `x, y ∈ D`,
@@ -921,7 +1012,9 @@ theorem Convex.image_sub_lt_mul_sub_of_deriv_lt {D : Set ℝ} (hD : Convex ℝ D
   simpa [-neg_lt_neg_iff] using
     neg_lt_neg (hD.mul_sub_lt_image_sub_of_lt_deriv hf.neg hf'.neg hf'_gt x hx y hy hxy)
 #align convex.image_sub_lt_mul_sub_of_deriv_lt Convex.image_sub_lt_mul_sub_of_deriv_lt
+-/
 
+#print image_sub_lt_mul_sub_of_deriv_lt /-
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f' < C`, then `f` grows slower than
 `C * x` on `D`, i.e., `f y - f x < C * (y - x)` whenever `x < y`. -/
 theorem image_sub_lt_mul_sub_of_deriv_lt {f : ℝ → ℝ} (hf : Differentiable ℝ f) {C}
@@ -929,8 +1022,10 @@ theorem image_sub_lt_mul_sub_of_deriv_lt {f : ℝ → ℝ} (hf : Differentiable
   convex_univ.image_sub_lt_mul_sub_of_deriv_lt hf.Continuous.ContinuousOn hf.DifferentiableOn
     (fun x _ => lt_hf' x) x trivial y trivial hxy
 #align image_sub_lt_mul_sub_of_deriv_lt image_sub_lt_mul_sub_of_deriv_lt
+-/
 
 /- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+#print Convex.image_sub_le_mul_sub_of_deriv_le /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' ≤ C`, then
 `f` grows at most as fast as `C * x` on `D`, i.e., `f y - f x ≤ C * (y - x)` whenever `x, y ∈ D`,
@@ -949,7 +1044,9 @@ theorem Convex.image_sub_le_mul_sub_of_deriv_le {D : Set ℝ} (hD : Convex ℝ D
   simpa [-neg_le_neg_iff] using
     neg_le_neg (hD.mul_sub_le_image_sub_of_le_deriv hf.neg hf'.neg hf'_ge x hx y hy hxy)
 #align convex.image_sub_le_mul_sub_of_deriv_le Convex.image_sub_le_mul_sub_of_deriv_le
+-/
 
+#print image_sub_le_mul_sub_of_deriv_le /-
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f' ≤ C`, then `f` grows at most as fast
 as `C * x`, i.e., `f y - f x ≤ C * (y - x)` whenever `x ≤ y`. -/
 theorem image_sub_le_mul_sub_of_deriv_le {f : ℝ → ℝ} (hf : Differentiable ℝ f) {C}
@@ -957,7 +1054,9 @@ theorem image_sub_le_mul_sub_of_deriv_le {f : ℝ → ℝ} (hf : Differentiable
   convex_univ.image_sub_le_mul_sub_of_deriv_le hf.Continuous.ContinuousOn hf.DifferentiableOn
     (fun x _ => le_hf' x) x trivial y trivial hxy
 #align image_sub_le_mul_sub_of_deriv_le image_sub_le_mul_sub_of_deriv_le
+-/
 
+#print Convex.strictMonoOn_of_deriv_pos /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is positive, then
 `f` is a strictly monotone function on `D`.
@@ -971,7 +1070,9 @@ theorem Convex.strictMonoOn_of_deriv_pos {D : Set ℝ} (hD : Convex ℝ D) {f :
     hD.mul_sub_lt_image_sub_of_lt_deriv hf _ hf' x hx y hy
   exact fun z hz => (differentiableAt_of_deriv_ne_zero (hf' z hz).ne').DifferentiableWithinAt
 #align convex.strict_mono_on_of_deriv_pos Convex.strictMonoOn_of_deriv_pos
+-/
 
+#print strictMono_of_deriv_pos /-
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is positive, then
 `f` is a strictly monotone function.
 Note that we don't require differentiability explicitly as it is already implied by the derivative
@@ -983,7 +1084,9 @@ theorem strictMono_of_deriv_pos {f : ℝ → ℝ} (hf' : ∀ x, 0 < deriv f x) :
         (differentiableAt_of_deriv_ne_zero (hf' z).ne').DifferentiableWithinAt.ContinuousWithinAt)
       fun x _ => hf' x
 #align strict_mono_of_deriv_pos strictMono_of_deriv_pos
+-/
 
+#print Convex.monotoneOn_of_deriv_nonneg /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonnegative, then
 `f` is a monotone function on `D`. -/
@@ -993,7 +1096,9 @@ theorem Convex.monotoneOn_of_deriv_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f :
   simpa only [MulZeroClass.zero_mul, sub_nonneg] using
     hD.mul_sub_le_image_sub_of_le_deriv hf hf' hf'_nonneg x hx y hy hxy
 #align convex.monotone_on_of_deriv_nonneg Convex.monotoneOn_of_deriv_nonneg
+-/
 
+#print monotone_of_deriv_nonneg /-
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is nonnegative, then
 `f` is a monotone function. -/
 theorem monotone_of_deriv_nonneg {f : ℝ → ℝ} (hf : Differentiable ℝ f) (hf' : ∀ x, 0 ≤ deriv f x) :
@@ -1002,7 +1107,9 @@ theorem monotone_of_deriv_nonneg {f : ℝ → ℝ} (hf : Differentiable ℝ f) (
     convex_univ.monotoneOn_of_deriv_nonneg hf.Continuous.ContinuousOn hf.DifferentiableOn fun x _ =>
       hf' x
 #align monotone_of_deriv_nonneg monotone_of_deriv_nonneg
+-/
 
+#print Convex.strictAntiOn_of_deriv_neg /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is negative, then
 `f` is a strictly antitone function on `D`. -/
@@ -1014,7 +1121,9 @@ theorem Convex.strictAntiOn_of_deriv_neg {D : Set ℝ} (hD : Convex ℝ D) {f :
       (fun z hz => (differentiableAt_of_deriv_ne_zero (hf' z hz).Ne).DifferentiableWithinAt) hf' x
       hx y
 #align convex.strict_anti_on_of_deriv_neg Convex.strictAntiOn_of_deriv_neg
+-/
 
+#print strictAnti_of_deriv_neg /-
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is negative, then
 `f` is a strictly antitone function.
 Note that we don't require differentiability explicitly as it is already implied by the derivative
@@ -1026,7 +1135,9 @@ theorem strictAnti_of_deriv_neg {f : ℝ → ℝ} (hf' : ∀ x, deriv f x < 0) :
         (differentiableAt_of_deriv_ne_zero (hf' z).Ne).DifferentiableWithinAt.ContinuousWithinAt)
       fun x _ => hf' x
 #align strict_anti_of_deriv_neg strictAnti_of_deriv_neg
+-/
 
+#print Convex.antitoneOn_of_deriv_nonpos /-
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonpositive, then
 `f` is an antitone function on `D`. -/
@@ -1036,7 +1147,9 @@ theorem Convex.antitoneOn_of_deriv_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f :
   simpa only [MulZeroClass.zero_mul, sub_nonpos] using
     hD.image_sub_le_mul_sub_of_deriv_le hf hf' hf'_nonpos x hx y hy hxy
 #align convex.antitone_on_of_deriv_nonpos Convex.antitoneOn_of_deriv_nonpos
+-/
 
+#print antitone_of_deriv_nonpos /-
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is nonpositive, then
 `f` is an antitone function. -/
 theorem antitone_of_deriv_nonpos {f : ℝ → ℝ} (hf : Differentiable ℝ f) (hf' : ∀ x, deriv f x ≤ 0) :
@@ -1045,7 +1158,9 @@ theorem antitone_of_deriv_nonpos {f : ℝ → ℝ} (hf : Differentiable ℝ f) (
     convex_univ.antitoneOn_of_deriv_nonpos hf.Continuous.ContinuousOn hf.DifferentiableOn fun x _ =>
       hf' x
 #align antitone_of_deriv_nonpos antitone_of_deriv_nonpos
+-/
 
+#print MonotoneOn.convexOn_of_deriv /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is differentiable on its interior,
 and `f'` is monotone on the interior, then `f` is convex on `D`. -/
 theorem MonotoneOn.convexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
@@ -1070,7 +1185,9 @@ theorem MonotoneOn.convexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ
       rw [← ha, ← hb]
       exact hf'_mono (hxyD' ⟨hxa, hay⟩) (hyzD' ⟨hyb, hbz⟩) (hay.trans hyb).le)
 #align monotone_on.convex_on_of_deriv MonotoneOn.convexOn_of_deriv
+-/
 
+#print AntitoneOn.concaveOn_of_deriv /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is differentiable on its interior,
 and `f'` is antitone on the interior, then `f` is concave on `D`. -/
 theorem AntitoneOn.concaveOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
@@ -1082,7 +1199,9 @@ theorem AntitoneOn.concaveOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ
     convert neg_le_neg (h_anti hx hy hxy) <;> convert deriv.neg
   neg_convex_on_iff.mp (this.convex_on_of_deriv hD hf.neg hf'.neg)
 #align antitone_on.concave_on_of_deriv AntitoneOn.concaveOn_of_deriv
+-/
 
+#print StrictMonoOn.exists_slope_lt_deriv_aux /-
 theorem StrictMonoOn.exists_slope_lt_deriv_aux {x y : ℝ} {f : ℝ → ℝ} (hf : ContinuousOn f (Icc x y))
     (hxy : x < y) (hf'_mono : StrictMonoOn (deriv f) (Ioo x y)) (h : ∀ w ∈ Ioo x y, deriv f w ≠ 0) :
     ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a :=
@@ -1096,7 +1215,9 @@ theorem StrictMonoOn.exists_slope_lt_deriv_aux {x y : ℝ} {f : ℝ → ℝ} (hf
   rw [← ha]
   exact hf'_mono ⟨hxa, hay⟩ ⟨hxa.trans hab, hby⟩ hab
 #align strict_mono_on.exists_slope_lt_deriv_aux StrictMonoOn.exists_slope_lt_deriv_aux
+-/
 
+#print StrictMonoOn.exists_slope_lt_deriv /-
 theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : ContinuousOn f (Icc x y))
     (hxy : x < y) (hf'_mono : StrictMonoOn (deriv f) (Ioo x y)) :
     ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a :=
@@ -1133,7 +1254,9 @@ theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : C
         exact (hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hwb, hby⟩ hwb).le
     linarith
 #align strict_mono_on.exists_slope_lt_deriv StrictMonoOn.exists_slope_lt_deriv
+-/
 
+#print StrictMonoOn.exists_deriv_lt_slope_aux /-
 theorem StrictMonoOn.exists_deriv_lt_slope_aux {x y : ℝ} {f : ℝ → ℝ} (hf : ContinuousOn f (Icc x y))
     (hxy : x < y) (hf'_mono : StrictMonoOn (deriv f) (Ioo x y)) (h : ∀ w ∈ Ioo x y, deriv f w ≠ 0) :
     ∃ a ∈ Ioo x y, deriv f a < (f y - f x) / (y - x) :=
@@ -1147,7 +1270,9 @@ theorem StrictMonoOn.exists_deriv_lt_slope_aux {x y : ℝ} {f : ℝ → ℝ} (hf
   rw [← ha]
   exact hf'_mono ⟨hxb, hba.trans hay⟩ ⟨hxa, hay⟩ hba
 #align strict_mono_on.exists_deriv_lt_slope_aux StrictMonoOn.exists_deriv_lt_slope_aux
+-/
 
+#print StrictMonoOn.exists_deriv_lt_slope /-
 theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : ContinuousOn f (Icc x y))
     (hxy : x < y) (hf'_mono : StrictMonoOn (deriv f) (Ioo x y)) :
     ∃ a ∈ Ioo x y, deriv f a < (f y - f x) / (y - x) :=
@@ -1184,7 +1309,9 @@ theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : C
         exact (hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hwb, hby⟩ hwb).le
     linarith
 #align strict_mono_on.exists_deriv_lt_slope StrictMonoOn.exists_deriv_lt_slope
+-/
 
+#print StrictMonoOn.strictConvexOn_of_deriv /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, and `f'` is strictly monotone on the
 interior, then `f` is strictly convex on `D`.
 Note that we don't require differentiability, since it is guaranteed at all but at most
@@ -1211,7 +1338,9 @@ theorem StrictMonoOn.strictConvexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {
       apply ha.trans (lt_trans _ hb)
       exact hf' (hxyD' ⟨hxa, hay⟩) (hyzD' ⟨hyb, hbz⟩) (hay.trans hyb))
 #align strict_mono_on.strict_convex_on_of_deriv StrictMonoOn.strictConvexOn_of_deriv
+-/
 
+#print StrictAntiOn.strictConcaveOn_of_deriv /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f'` is strictly antitone on the
 interior, then `f` is strictly concave on `D`.
 Note that we don't require differentiability, since it is guaranteed at all but at most
@@ -1225,21 +1354,27 @@ theorem StrictAntiOn.strictConcaveOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D)
     convert neg_lt_neg (h_anti hx hy hxy) <;> convert deriv.neg
   neg_strict_convex_on_iff.mp (this.strict_convex_on_of_deriv hD hf.neg)
 #align strict_anti_on.strict_concave_on_of_deriv StrictAntiOn.strictConcaveOn_of_deriv
+-/
 
+#print Monotone.convexOn_univ_of_deriv /-
 /-- If a function `f` is differentiable and `f'` is monotone on `ℝ` then `f` is convex. -/
 theorem Monotone.convexOn_univ_of_deriv {f : ℝ → ℝ} (hf : Differentiable ℝ f)
     (hf'_mono : Monotone (deriv f)) : ConvexOn ℝ univ f :=
   (hf'_mono.MonotoneOn _).convexOn_of_deriv convex_univ hf.Continuous.ContinuousOn
     hf.DifferentiableOn
 #align monotone.convex_on_univ_of_deriv Monotone.convexOn_univ_of_deriv
+-/
 
+#print Antitone.concaveOn_univ_of_deriv /-
 /-- If a function `f` is differentiable and `f'` is antitone on `ℝ` then `f` is concave. -/
 theorem Antitone.concaveOn_univ_of_deriv {f : ℝ → ℝ} (hf : Differentiable ℝ f)
     (hf'_anti : Antitone (deriv f)) : ConcaveOn ℝ univ f :=
   (hf'_anti.AntitoneOn _).concaveOn_of_deriv convex_univ hf.Continuous.ContinuousOn
     hf.DifferentiableOn
 #align antitone.concave_on_univ_of_deriv Antitone.concaveOn_univ_of_deriv
+-/
 
+#print StrictMono.strictConvexOn_univ_of_deriv /-
 /-- If a function `f` is continuous and `f'` is strictly monotone on `ℝ` then `f` is strictly
 convex. Note that we don't require differentiability, since it is guaranteed at all but at most
 one point by the strict monotonicity of `f'`. -/
@@ -1247,7 +1382,9 @@ theorem StrictMono.strictConvexOn_univ_of_deriv {f : ℝ → ℝ} (hf : Continuo
     (hf'_mono : StrictMono (deriv f)) : StrictConvexOn ℝ univ f :=
   (hf'_mono.StrictMonoOn _).strictConvexOn_of_deriv convex_univ hf.ContinuousOn
 #align strict_mono.strict_convex_on_univ_of_deriv StrictMono.strictConvexOn_univ_of_deriv
+-/
 
+#print StrictAnti.strictConcaveOn_univ_of_deriv /-
 /-- If a function `f` is continuous and `f'` is strictly antitone on `ℝ` then `f` is strictly
 concave. Note that we don't require differentiability, since it is guaranteed at all but at most
 one point by the strict antitonicity of `f'`. -/
@@ -1255,7 +1392,9 @@ theorem StrictAnti.strictConcaveOn_univ_of_deriv {f : ℝ → ℝ} (hf : Continu
     (hf'_anti : StrictAnti (deriv f)) : StrictConcaveOn ℝ univ f :=
   (hf'_anti.StrictAntiOn _).strictConcaveOn_of_deriv convex_univ hf.ContinuousOn
 #align strict_anti.strict_concave_on_univ_of_deriv StrictAnti.strictConcaveOn_univ_of_deriv
+-/
 
+#print convexOn_of_deriv2_nonneg /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is twice differentiable on its
 interior, and `f''` is nonnegative on the interior, then `f` is convex on `D`. -/
 theorem convexOn_of_deriv2_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
@@ -1265,7 +1404,9 @@ theorem convexOn_of_deriv2_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ →
         rwa [interior_interior]).convexOn_of_deriv
     hD hf hf'
 #align convex_on_of_deriv2_nonneg convexOn_of_deriv2_nonneg
+-/
 
+#print concaveOn_of_deriv2_nonpos /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is twice differentiable on its
 interior, and `f''` is nonpositive on the interior, then `f` is concave on `D`. -/
 theorem concaveOn_of_deriv2_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
@@ -1275,7 +1416,9 @@ theorem concaveOn_of_deriv2_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ 
         rwa [interior_interior]).concaveOn_of_deriv
     hD hf hf'
 #align concave_on_of_deriv2_nonpos concaveOn_of_deriv2_nonpos
+-/
 
+#print strictConvexOn_of_deriv2_pos /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly positive on the
 interior, then `f` is strictly convex on `D`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
@@ -1289,7 +1432,9 @@ theorem strictConvexOn_of_deriv2_pos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ
         by rwa [interior_interior]).strictConvexOn_of_deriv
     hD hf
 #align strict_convex_on_of_deriv2_pos strictConvexOn_of_deriv2_pos
+-/
 
+#print strictConcaveOn_of_deriv2_neg /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly negative on the
 interior, then `f` is strictly concave on `D`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
@@ -1303,7 +1448,9 @@ theorem strictConcaveOn_of_deriv2_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ
         by rwa [interior_interior]).strictConcaveOn_of_deriv
     hD hf
 #align strict_concave_on_of_deriv2_neg strictConcaveOn_of_deriv2_neg
+-/
 
+#print convexOn_of_deriv2_nonneg' /-
 /-- If a function `f` is twice differentiable on an open convex set `D ⊆ ℝ` and
 `f''` is nonnegative on `D`, then `f` is convex on `D`. -/
 theorem convexOn_of_deriv2_nonneg' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
@@ -1312,7 +1459,9 @@ theorem convexOn_of_deriv2_nonneg' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ 
   convexOn_of_deriv2_nonneg hD hf'.ContinuousOn (hf'.mono interior_subset)
     (hf''.mono interior_subset) fun x hx => hf''_nonneg x (interior_subset hx)
 #align convex_on_of_deriv2_nonneg' convexOn_of_deriv2_nonneg'
+-/
 
+#print concaveOn_of_deriv2_nonpos' /-
 /-- If a function `f` is twice differentiable on an open convex set `D ⊆ ℝ` and
 `f''` is nonpositive on `D`, then `f` is concave on `D`. -/
 theorem concaveOn_of_deriv2_nonpos' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
@@ -1321,7 +1470,9 @@ theorem concaveOn_of_deriv2_nonpos' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ 
   concaveOn_of_deriv2_nonpos hD hf'.ContinuousOn (hf'.mono interior_subset)
     (hf''.mono interior_subset) fun x hx => hf''_nonpos x (interior_subset hx)
 #align concave_on_of_deriv2_nonpos' concaveOn_of_deriv2_nonpos'
+-/
 
+#print strictConvexOn_of_deriv2_pos' /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly positive on `D`,
 then `f` is strictly convex on `D`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
@@ -1330,7 +1481,9 @@ theorem strictConvexOn_of_deriv2_pos' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ
     (hf : ContinuousOn f D) (hf'' : ∀ x ∈ D, 0 < ((deriv^[2]) f) x) : StrictConvexOn ℝ D f :=
   strictConvexOn_of_deriv2_pos hD hf fun x hx => hf'' x (interior_subset hx)
 #align strict_convex_on_of_deriv2_pos' strictConvexOn_of_deriv2_pos'
+-/
 
+#print strictConcaveOn_of_deriv2_neg' /-
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly negative on `D`,
 then `f` is strictly concave on `D`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
@@ -1339,7 +1492,9 @@ theorem strictConcaveOn_of_deriv2_neg' {D : Set ℝ} (hD : Convex ℝ D) {f : 
     (hf : ContinuousOn f D) (hf'' : ∀ x ∈ D, (deriv^[2]) f x < 0) : StrictConcaveOn ℝ D f :=
   strictConcaveOn_of_deriv2_neg hD hf fun x hx => hf'' x (interior_subset hx)
 #align strict_concave_on_of_deriv2_neg' strictConcaveOn_of_deriv2_neg'
+-/
 
+#print convexOn_univ_of_deriv2_nonneg /-
 /-- If a function `f` is twice differentiable on `ℝ`, and `f''` is nonnegative on `ℝ`,
 then `f` is convex on `ℝ`. -/
 theorem convexOn_univ_of_deriv2_nonneg {f : ℝ → ℝ} (hf' : Differentiable ℝ f)
@@ -1348,7 +1503,9 @@ theorem convexOn_univ_of_deriv2_nonneg {f : ℝ → ℝ} (hf' : Differentiable 
   convexOn_of_deriv2_nonneg' convex_univ hf'.DifferentiableOn hf''.DifferentiableOn fun x _ =>
     hf''_nonneg x
 #align convex_on_univ_of_deriv2_nonneg convexOn_univ_of_deriv2_nonneg
+-/
 
+#print concaveOn_univ_of_deriv2_nonpos /-
 /-- If a function `f` is twice differentiable on `ℝ`, and `f''` is nonpositive on `ℝ`,
 then `f` is concave on `ℝ`. -/
 theorem concaveOn_univ_of_deriv2_nonpos {f : ℝ → ℝ} (hf' : Differentiable ℝ f)
@@ -1357,7 +1514,9 @@ theorem concaveOn_univ_of_deriv2_nonpos {f : ℝ → ℝ} (hf' : Differentiable
   concaveOn_of_deriv2_nonpos' convex_univ hf'.DifferentiableOn hf''.DifferentiableOn fun x _ =>
     hf''_nonpos x
 #align concave_on_univ_of_deriv2_nonpos concaveOn_univ_of_deriv2_nonpos
+-/
 
+#print strictConvexOn_univ_of_deriv2_pos /-
 /-- If a function `f` is continuous on `ℝ`, and `f''` is strictly positive on `ℝ`,
 then `f` is strictly convex on `ℝ`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
@@ -1366,7 +1525,9 @@ theorem strictConvexOn_univ_of_deriv2_pos {f : ℝ → ℝ} (hf : Continuous f)
     (hf'' : ∀ x, 0 < ((deriv^[2]) f) x) : StrictConvexOn ℝ univ f :=
   strictConvexOn_of_deriv2_pos' convex_univ hf.ContinuousOn fun x _ => hf'' x
 #align strict_convex_on_univ_of_deriv2_pos strictConvexOn_univ_of_deriv2_pos
+-/
 
+#print strictConcaveOn_univ_of_deriv2_neg /-
 /-- If a function `f` is continuous on `ℝ`, and `f''` is strictly negative on `ℝ`,
 then `f` is strictly concave on `ℝ`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
@@ -1375,10 +1536,12 @@ theorem strictConcaveOn_univ_of_deriv2_neg {f : ℝ → ℝ} (hf : Continuous f)
     (hf'' : ∀ x, (deriv^[2]) f x < 0) : StrictConcaveOn ℝ univ f :=
   strictConcaveOn_of_deriv2_neg' convex_univ hf.ContinuousOn fun x _ => hf'' x
 #align strict_concave_on_univ_of_deriv2_neg strictConcaveOn_univ_of_deriv2_neg
+-/
 
 /-! ### Functions `f : E → ℝ` -/
 
 
+#print domain_mvt /-
 /-- Lagrange's Mean Value Theorem, applied to convex domains. -/
 theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ] ℝ}
     (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
@@ -1417,6 +1580,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
   use g t; refine' ⟨hseg t <| hIccIoo Ht, _⟩
   simp [g, hMVT']
 #align domain_mvt domain_mvt
+-/
 
 section IsROrC
 
@@ -1434,6 +1598,7 @@ balls over `ℝ` or `ℂ`. For now, we only include the ones that we need.
 variable {𝕜 : Type _} [IsROrC 𝕜] {G : Type _} [NormedAddCommGroup G] [NormedSpace 𝕜 G] {H : Type _}
   [NormedAddCommGroup H] [NormedSpace 𝕜 H] {f : G → H} {f' : G → G →L[𝕜] H} {x : G}
 
+#print hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt /-
 /-- Over the reals or the complexes, a continuously differentiable function is strictly
 differentiable. -/
 theorem hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
@@ -1456,7 +1621,9 @@ theorem hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
   refine' (convex_ball _ _).norm_image_sub_le_of_norm_hasFDerivWithin_le' _ hf' h.2 h.1
   exact fun y hy => (hε hy).1.HasFDerivWithinAt
 #align has_strict_fderiv_at_of_has_fderiv_at_of_continuous_at hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
+-/
 
+#print hasStrictDerivAt_of_hasDerivAt_of_continuousAt /-
 /-- Over the reals or the complexes, a continuously differentiable function is strictly
 differentiable. -/
 theorem hasStrictDerivAt_of_hasDerivAt_of_continuousAt {f f' : 𝕜 → G} {x : 𝕜}
@@ -1465,6 +1632,7 @@ theorem hasStrictDerivAt_of_hasDerivAt_of_continuousAt {f f' : 𝕜 → G} {x :
   hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt (hder.mono fun y hy => hy.HasFDerivAt) <|
     (smulRightL 𝕜 𝕜 G 1).Continuous.ContinuousAt.comp hcont
 #align has_strict_deriv_at_of_has_deriv_at_of_continuous_at hasStrictDerivAt_of_hasDerivAt_of_continuousAt
+-/
 
 end IsROrC
 
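The growth estimates at the top of this diff (`Convex.mul_sub_lt_image_sub_of_lt_deriv` and its variants) all come down to the one-variable mean value argument visible in their proofs. As a rough sketch in ordinary notation, for `x < y` in `D`:

$$
\exists\, a \in (x, y),\qquad \frac{f(y) - f(x)}{y - x} = f'(a) > C
\quad\Longrightarrow\quad
f(y) - f(x) > C\,(y - x),
$$

since `y - x > 0`. The `≤` versions weaken both inequalities, and specialising to `C = 0` yields the (strict) monotonicity lemmas that follow.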
Diff
@@ -604,7 +604,6 @@ theorem norm_image_sub_le_of_norm_hasFDerivWithin_le' (hf : ∀ x ∈ s, HasFDer
     _ = ‖f y - φ y - (f x - φ x)‖ := by abel
     _ = ‖g y - g x‖ := by simp
     _ ≤ C * ‖y - x‖ := Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le hg bound hs xs ys
-    
 #align convex.norm_image_sub_le_of_norm_has_fderiv_within_le' Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le'
 
 /-- Variant of the mean value inequality on a convex set. Version with `fderiv_within`. -/
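
The primed variant `Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le'`, whose `calc` block is trimmed in the diff above, reduces to the unprimed inequality by subtracting the fixed linear map. Schematically, with `g := f - φ`:

$$
\|g'(x)\| = \|f'(x) - \varphi\| \le C \ \text{on } s
\quad\Longrightarrow\quad
\|f(y) - f(x) - \varphi(y - x)\| = \|g(y) - g(x)\| \le C\,\|y - x\|.
$$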
Diff
@@ -846,7 +846,7 @@ theorem exists_deriv_eq_slope : ∃ c ∈ Ioo a b, deriv f c = (f b - f a) / (b
 
 end Interval
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C < f'`, then
 `f` grows faster than `C * x` on `D`, i.e., `C * (y - x) < f y - f x` whenever `x, y ∈ D`,
@@ -874,7 +874,7 @@ theorem mul_sub_lt_image_sub_of_lt_deriv {f : ℝ → ℝ} (hf : Differentiable
     (fun x _ => hf'_gt x) x trivial y trivial hxy
 #align mul_sub_lt_image_sub_of_lt_deriv mul_sub_lt_image_sub_of_lt_deriv
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C ≤ f'`, then
 `f` grows at least as fast as `C * x` on `D`, i.e., `C * (y - x) ≤ f y - f x` whenever `x, y ∈ D`,
@@ -903,7 +903,7 @@ theorem mul_sub_le_image_sub_of_le_deriv {f : ℝ → ℝ} (hf : Differentiable
     (fun x _ => hf'_ge x) x trivial y trivial hxy
 #align mul_sub_le_image_sub_of_le_deriv mul_sub_le_image_sub_of_le_deriv
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' < C`, then
 `f` grows slower than `C * x` on `D`, i.e., `f y - f x < C * (y - x)` whenever `x, y ∈ D`,
@@ -931,7 +931,7 @@ theorem image_sub_lt_mul_sub_of_deriv_lt {f : ℝ → ℝ} (hf : Differentiable
     (fun x _ => lt_hf' x) x trivial y trivial hxy
 #align image_sub_lt_mul_sub_of_deriv_lt image_sub_lt_mul_sub_of_deriv_lt
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:638:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' ≤ C`, then
 `f` grows at most as fast as `C * x` on `D`, i.e., `f y - f x ≤ C * (y - x)` whenever `x, y ∈ D`,
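
The `f' < C` and `f' ≤ C` estimates touched in these hunks are not proved from scratch: as their proofs show, they follow by applying the `C < f'` and `C ≤ f'` versions to `-f`. In sketch form:

$$
f' \le C \ \text{on int}\,D
\;\Longrightarrow\; -C \le (-f)'
\;\Longrightarrow\; -C\,(y - x) \le -\bigl(f(y) - f(x)\bigr)
\;\Longrightarrow\; f(y) - f(x) \le C\,(y - x).
$$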
Diff
@@ -4,7 +4,7 @@ Released under Apache 2.0 license as described in the file LICENSE.
 Authors: Sébastien Gouëzel, Yury Kudryashov
 
 ! This file was ported from Lean 3 source module analysis.calculus.mean_value
-! leanprover-community/mathlib commit 3bce8d800a6f2b8f63fe1e588fd76a9ff4adcebe
+! leanprover-community/mathlib commit 2ebc1d6c2fed9f54c95bbc3998eaa5570527129a
 ! Please do not edit these lines, except to modify the commit id
 ! if you have ported upstream changes.
 -/
@@ -18,6 +18,9 @@ import Mathbin.Topology.Instances.RealVectorSpace
 /-!
 # The mean value inequality and equalities
 
+> THIS FILE IS SYNCHRONIZED WITH MATHLIB4.
+> Any changes to this file require a corresponding PR to mathlib4.
+
 In this file we prove the following facts:
 
 * `convex.norm_image_sub_le_of_norm_deriv_le` : if `f` is differentiable on a convex set `s`
@@ -99,8 +102,8 @@ theorem image_le_of_liminf_slope_right_lt_deriv_boundary' {f f' : ℝ → ℝ} {
     (hB' : ∀ x ∈ Ico a b, HasDerivWithinAt B (B' x) (Ici x) x)
     (bound : ∀ x ∈ Ico a b, f x = B x → f' x < B' x) : ∀ ⦃x⦄, x ∈ Icc a b → f x ≤ B x :=
   by
-  change Icc a b ⊆ { x | f x ≤ B x }
-  set s := { x | f x ≤ B x } ∩ Icc a b
+  change Icc a b ⊆ {x | f x ≤ B x}
+  set s := {x | f x ≤ B x} ∩ Icc a b
   have A : ContinuousOn (fun x => (f x, B x)) (Icc a b) := hf.prod hB
   have : IsClosed s := by
     simp only [s, inter_comm]
@@ -527,7 +530,7 @@ theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt (hs
     (hcont : ContinuousWithinAt f' s x) (K : ℝ≥0) (hK : ‖f' x‖₊ < K) :
     ∃ t ∈ 𝓝[s] x, LipschitzOnWith K f t :=
   by
-  obtain ⟨ε, ε0, hε⟩ : ∃ ε > 0, ball x ε ∩ s ⊆ { y | HasFDerivWithinAt f (f' y) s y ∧ ‖f' y‖₊ < K }
+  obtain ⟨ε, ε0, hε⟩ : ∃ ε > 0, ball x ε ∩ s ⊆ {y | HasFDerivWithinAt f (f' y) s y ∧ ‖f' y‖₊ < K}
   exact mem_nhds_within_iff.1 (hder.and <| hcont.nnnorm.eventually (gt_mem_nhds hK))
   rw [inter_comm] at hε 
   refine' ⟨s ∩ ball x ε, inter_mem_nhdsWithin _ (ball_mem_nhds _ ε0), _⟩
@@ -1101,7 +1104,7 @@ theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : C
   by
   by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
   · apply StrictMonoOn.exists_slope_lt_deriv_aux hf hxy hf'_mono h
-  · push_neg  at h 
+  · push_neg at h 
     rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
     obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ (a : ℝ) (H : a ∈ Ioo x w), (f w - f x) / (w - x) < deriv f a :=
       by
@@ -1152,7 +1155,7 @@ theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : C
   by
   by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
   · apply StrictMonoOn.exists_deriv_lt_slope_aux hf hxy hf'_mono h
-  · push_neg  at h 
+  · push_neg at h 
     rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
     obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ (a : ℝ) (H : a ∈ Ioo x w), deriv f a < (f w - f x) / (w - x) :=
       by
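
The local Lipschitz lemma `Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt`, reformatted above, combines continuity of `f'` within `s` at `x` with the convex mean value inequality. A sketch of the argument:

$$
\|f'(x)\| < K
\;\Longrightarrow\;
\exists\, \varepsilon > 0,\ \|f'\| \le K \ \text{on } s \cap B(x, \varepsilon)
\;\Longrightarrow\;
f \ \text{is } K\text{-Lipschitz on } s \cap B(x, \varepsilon),
$$

using that `s ∩ ball x ε` is again convex.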
Diff
@@ -419,6 +419,7 @@ theorem constant_of_derivWithin_zero (hdiff : DifferentiableOn ℝ f (Icc a b))
 
 variable {f' g : ℝ → E}
 
+#print eq_of_has_deriv_right_eq /-
 /-- If two continuous functions on `[a, b]` have the same right derivative and are equal at `a`,
   then they are equal everywhere on `[a, b]`. -/
 theorem eq_of_has_deriv_right_eq (derivf : ∀ x ∈ Ico a b, HasDerivWithinAt f (f' x) (Ici x) x)
@@ -431,7 +432,9 @@ theorem eq_of_has_deriv_right_eq (derivf : ∀ x ∈ Ico a b, HasDerivWithinAt f
       constant_of_has_deriv_right_zero (fcont.sub gcont) fun y hy => by
         simpa only [sub_self] using (derivf y hy).sub (derivg y hy)
 #align eq_of_has_deriv_right_eq eq_of_has_deriv_right_eq
+-/
 
+#print eq_of_derivWithin_eq /-
 /-- If two differentiable functions on `[a, b]` have the same derivative within `[a, b]` everywhere
   on `[a, b)` and are equal at `a`, then they are equal everywhere on `[a, b]`. -/
 theorem eq_of_derivWithin_eq (fdiff : DifferentiableOn ℝ f (Icc a b))
@@ -447,6 +450,7 @@ theorem eq_of_derivWithin_eq (fdiff : DifferentiableOn ℝ f (Icc a b))
     eq_of_has_deriv_right_eq A (fun y hy => (hderiv hy).symm ▸ B y hy) fdiff.continuous_on
       gdiff.continuous_on hi
 #align eq_of_deriv_within_eq eq_of_derivWithin_eq
+-/
 
 end
 
@@ -468,9 +472,9 @@ variable {f g : E → G} {C : ℝ} {s : Set E} {x y : E} {f' g' : E → E →L[
 
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C`, then
 the function is `C`-Lipschitz. Version with `has_fderiv_within`. -/
-theorem norm_image_sub_le_of_norm_has_fderiv_within_le
-    (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖ ≤ C) (hs : Convex ℝ s)
-    (xs : x ∈ s) (ys : y ∈ s) : ‖f y - f x‖ ≤ C * ‖y - x‖ :=
+theorem norm_image_sub_le_of_norm_hasFDerivWithin_le (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x)
+    (bound : ∀ x ∈ s, ‖f' x‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
+    ‖f y - f x‖ ≤ C * ‖y - x‖ :=
   by
   letI : NormedSpace ℝ G := RestrictScalars.normedSpace ℝ 𝕜 G
   /- By composition with `t ↦ x + t • (y-x)`, we reduce to a statement for functions defined
@@ -498,19 +502,19 @@ theorem norm_image_sub_le_of_norm_has_fderiv_within_le
   apply norm_image_sub_le_of_norm_deriv_le_segment_01' D2
   refine' fun t ht => le_of_op_norm_le _ _ _
   exact bound (g t) (segm <| Ico_subset_Icc_self ht)
-#align convex.norm_image_sub_le_of_norm_has_fderiv_within_le Convex.norm_image_sub_le_of_norm_has_fderiv_within_le
+#align convex.norm_image_sub_le_of_norm_has_fderiv_within_le Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le
 
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C` on
 `s`, then the function is `C`-Lipschitz on `s`. Version with `has_fderiv_within` and
 `lipschitz_on_with`. -/
-theorem lipschitzOnWith_of_nnnorm_has_fderiv_within_le {C : ℝ≥0}
+theorem lipschitzOnWith_of_nnnorm_hasFDerivWithin_le {C : ℝ≥0}
     (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖₊ ≤ C)
     (hs : Convex ℝ s) : LipschitzOnWith C f s :=
   by
   rw [lipschitzOnWith_iff_norm_sub_le]
   intro x x_in y y_in
   exact hs.norm_image_sub_le_of_norm_has_fderiv_within_le hf bound y_in x_in
-#align convex.lipschitz_on_with_of_nnnorm_has_fderiv_within_le Convex.lipschitzOnWith_of_nnnorm_has_fderiv_within_le
+#align convex.lipschitz_on_with_of_nnnorm_has_fderiv_within_le Convex.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le
 
 /-- Let `s` be a convex set in a real normed vector space `E`, let `f : E → G` be a function
 differentiable within `s` in a neighborhood of `x : E` with derivative `f'`. Suppose that `f'` is
@@ -528,7 +532,7 @@ theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt (hs
   rw [inter_comm] at hε 
   refine' ⟨s ∩ ball x ε, inter_mem_nhdsWithin _ (ball_mem_nhds _ ε0), _⟩
   exact
-    (hs.inter (convex_ball _ _)).lipschitzOnWith_of_nnnorm_has_fderiv_within_le
+    (hs.inter (convex_ball _ _)).lipschitzOnWith_of_nnnorm_hasFDerivWithin_le
       (fun y hy => (hε hy).1.mono (inter_subset_left _ _)) fun y hy => (hε hy).2.le
 #align convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at_of_nnnorm_lt Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt
 
@@ -550,8 +554,8 @@ bounded by `C`, then the function is `C`-Lipschitz. Version with `fderiv_within`
 theorem norm_image_sub_le_of_norm_fderivWithin_le (hf : DifferentiableOn 𝕜 f s)
     (bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x‖ ≤ C * ‖y - x‖ :=
-  hs.norm_image_sub_le_of_norm_has_fderiv_within_le (fun x hx => (hf x hx).HasFDerivWithinAt) bound
-    xs ys
+  hs.norm_image_sub_le_of_norm_hasFDerivWithin_le (fun x hx => (hf x hx).HasFDerivWithinAt) bound xs
+    ys
 #align convex.norm_image_sub_le_of_norm_fderiv_within_le Convex.norm_image_sub_le_of_norm_fderivWithin_le
 
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C` on
@@ -559,7 +563,7 @@ theorem norm_image_sub_le_of_norm_fderivWithin_le (hf : DifferentiableOn 𝕜 f
 `lipschitz_on_with`. -/
 theorem lipschitzOnWith_of_nnnorm_fderivWithin_le {C : ℝ≥0} (hf : DifferentiableOn 𝕜 f s)
     (bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x‖₊ ≤ C) (hs : Convex ℝ s) : LipschitzOnWith C f s :=
-  hs.lipschitzOnWith_of_nnnorm_has_fderiv_within_le (fun x hx => (hf x hx).HasFDerivWithinAt) bound
+  hs.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le (fun x hx => (hf x hx).HasFDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_fderiv_within_le Convex.lipschitzOnWith_of_nnnorm_fderivWithin_le
 
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C`,
@@ -567,7 +571,7 @@ then the function is `C`-Lipschitz. Version with `fderiv`. -/
 theorem norm_image_sub_le_of_norm_fderiv_le (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
     (bound : ∀ x ∈ s, ‖fderiv 𝕜 f x‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x‖ ≤ C * ‖y - x‖ :=
-  hs.norm_image_sub_le_of_norm_has_fderiv_within_le
+  hs.norm_image_sub_le_of_norm_hasFDerivWithin_le
     (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_le Convex.norm_image_sub_le_of_norm_fderiv_le
 
@@ -575,16 +579,16 @@ theorem norm_image_sub_le_of_norm_fderiv_le (hf : ∀ x ∈ s, DifferentiableAt
 `s`, then the function is `C`-Lipschitz on `s`. Version with `fderiv` and `lipschitz_on_with`. -/
 theorem lipschitzOnWith_of_nnnorm_fderiv_le {C : ℝ≥0} (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
     (bound : ∀ x ∈ s, ‖fderiv 𝕜 f x‖₊ ≤ C) (hs : Convex ℝ s) : LipschitzOnWith C f s :=
-  hs.lipschitzOnWith_of_nnnorm_has_fderiv_within_le
+  hs.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le
     (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_fderiv_le Convex.lipschitzOnWith_of_nnnorm_fderiv_le
 
 /-- Variant of the mean value inequality on a convex set, using a bound on the difference between
 the derivative and a fixed linear map, rather than a bound on the derivative itself. Version with
 `has_fderiv_within`. -/
-theorem norm_image_sub_le_of_norm_has_fderiv_within_le'
-    (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x - φ‖ ≤ C)
-    (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) : ‖f y - f x - φ (y - x)‖ ≤ C * ‖y - x‖ :=
+theorem norm_image_sub_le_of_norm_hasFDerivWithin_le' (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x)
+    (bound : ∀ x ∈ s, ‖f' x - φ‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
+    ‖f y - f x - φ (y - x)‖ ≤ C * ‖y - x‖ :=
   by
   /- We subtract `φ` to define a new function `g` for which `g' = 0`, for which the previous theorem
     applies, `convex.norm_image_sub_le_of_norm_has_fderiv_within_le`. Then, we just need to glue
@@ -596,15 +600,15 @@ theorem norm_image_sub_le_of_norm_has_fderiv_within_le'
     ‖f y - f x - φ (y - x)‖ = ‖f y - f x - (φ y - φ x)‖ := by simp
     _ = ‖f y - φ y - (f x - φ x)‖ := by abel
     _ = ‖g y - g x‖ := by simp
-    _ ≤ C * ‖y - x‖ := Convex.norm_image_sub_le_of_norm_has_fderiv_within_le hg bound hs xs ys
+    _ ≤ C * ‖y - x‖ := Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le hg bound hs xs ys
     
-#align convex.norm_image_sub_le_of_norm_has_fderiv_within_le' Convex.norm_image_sub_le_of_norm_has_fderiv_within_le'
+#align convex.norm_image_sub_le_of_norm_has_fderiv_within_le' Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le'
 
 /-- Variant of the mean value inequality on a convex set. Version with `fderiv_within`. -/
 theorem norm_image_sub_le_of_norm_fderivWithin_le' (hf : DifferentiableOn 𝕜 f s)
     (bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x - φ‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x - φ (y - x)‖ ≤ C * ‖y - x‖ :=
-  hs.norm_image_sub_le_of_norm_has_fderiv_within_le' (fun x hx => (hf x hx).HasFDerivWithinAt) bound
+  hs.norm_image_sub_le_of_norm_hasFDerivWithin_le' (fun x hx => (hf x hx).HasFDerivWithinAt) bound
     xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_within_le' Convex.norm_image_sub_le_of_norm_fderivWithin_le'
 
@@ -612,7 +616,7 @@ theorem norm_image_sub_le_of_norm_fderivWithin_le' (hf : DifferentiableOn 𝕜 f
 theorem norm_image_sub_le_of_norm_fderiv_le' (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
     (bound : ∀ x ∈ s, ‖fderiv 𝕜 f x - φ‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x - φ (y - x)‖ ≤ C * ‖y - x‖ :=
-  hs.norm_image_sub_le_of_norm_has_fderiv_within_le'
+  hs.norm_image_sub_le_of_norm_hasFDerivWithin_le'
     (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_le' Convex.norm_image_sub_le_of_norm_fderiv_le'
 
@@ -660,29 +664,29 @@ variable {f f' : 𝕜 → G} {s : Set 𝕜} {x y : 𝕜}
 
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
 bounded by `C`, then the function is `C`-Lipschitz. Version with `has_deriv_within`. -/
-theorem norm_image_sub_le_of_norm_has_deriv_within_le {C : ℝ}
+theorem norm_image_sub_le_of_norm_hasDerivWithin_le {C : ℝ}
     (hf : ∀ x ∈ s, HasDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖ ≤ C) (hs : Convex ℝ s)
     (xs : x ∈ s) (ys : y ∈ s) : ‖f y - f x‖ ≤ C * ‖y - x‖ :=
-  Convex.norm_image_sub_le_of_norm_has_fderiv_within_le (fun x hx => (hf x hx).HasFDerivWithinAt)
+  Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le (fun x hx => (hf x hx).HasFDerivWithinAt)
     (fun x hx => le_trans (by simp) (bound x hx)) hs xs ys
-#align convex.norm_image_sub_le_of_norm_has_deriv_within_le Convex.norm_image_sub_le_of_norm_has_deriv_within_le
+#align convex.norm_image_sub_le_of_norm_has_deriv_within_le Convex.norm_image_sub_le_of_norm_hasDerivWithin_le
 
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
 bounded by `C` on `s`, then the function is `C`-Lipschitz on `s`.
 Version with `has_deriv_within` and `lipschitz_on_with`. -/
-theorem lipschitzOnWith_of_nnnorm_has_deriv_within_le {C : ℝ≥0} (hs : Convex ℝ s)
+theorem lipschitzOnWith_of_nnnorm_hasDerivWithin_le {C : ℝ≥0} (hs : Convex ℝ s)
     (hf : ∀ x ∈ s, HasDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖₊ ≤ C) :
     LipschitzOnWith C f s :=
-  Convex.lipschitzOnWith_of_nnnorm_has_fderiv_within_le (fun x hx => (hf x hx).HasFDerivWithinAt)
+  Convex.lipschitzOnWith_of_nnnorm_hasFDerivWithin_le (fun x hx => (hf x hx).HasFDerivWithinAt)
     (fun x hx => le_trans (by simp) (bound x hx)) hs
-#align convex.lipschitz_on_with_of_nnnorm_has_deriv_within_le Convex.lipschitzOnWith_of_nnnorm_has_deriv_within_le
+#align convex.lipschitz_on_with_of_nnnorm_has_deriv_within_le Convex.lipschitzOnWith_of_nnnorm_hasDerivWithin_le
 
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function within
 this set is bounded by `C`, then the function is `C`-Lipschitz. Version with `deriv_within` -/
 theorem norm_image_sub_le_of_norm_derivWithin_le {C : ℝ} (hf : DifferentiableOn 𝕜 f s)
     (bound : ∀ x ∈ s, ‖derivWithin f s x‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x‖ ≤ C * ‖y - x‖ :=
-  hs.norm_image_sub_le_of_norm_has_deriv_within_le (fun x hx => (hf x hx).HasDerivWithinAt) bound xs
+  hs.norm_image_sub_le_of_norm_hasDerivWithin_le (fun x hx => (hf x hx).HasDerivWithinAt) bound xs
     ys
 #align convex.norm_image_sub_le_of_norm_deriv_within_le Convex.norm_image_sub_le_of_norm_derivWithin_le
 
@@ -692,7 +696,7 @@ Version with `deriv_within` and `lipschitz_on_with`. -/
 theorem lipschitzOnWith_of_nnnorm_derivWithin_le {C : ℝ≥0} (hs : Convex ℝ s)
     (hf : DifferentiableOn 𝕜 f s) (bound : ∀ x ∈ s, ‖derivWithin f s x‖₊ ≤ C) :
     LipschitzOnWith C f s :=
-  hs.lipschitzOnWith_of_nnnorm_has_deriv_within_le (fun x hx => (hf x hx).HasDerivWithinAt) bound
+  hs.lipschitzOnWith_of_nnnorm_hasDerivWithin_le (fun x hx => (hf x hx).HasDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_deriv_within_le Convex.lipschitzOnWith_of_nnnorm_derivWithin_le
 
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
@@ -700,8 +704,8 @@ bounded by `C`, then the function is `C`-Lipschitz. Version with `deriv`. -/
 theorem norm_image_sub_le_of_norm_deriv_le {C : ℝ} (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
     (bound : ∀ x ∈ s, ‖deriv f x‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x‖ ≤ C * ‖y - x‖ :=
-  hs.norm_image_sub_le_of_norm_has_deriv_within_le
-    (fun x hx => (hf x hx).HasDerivAt.HasDerivWithinAt) bound xs ys
+  hs.norm_image_sub_le_of_norm_hasDerivWithin_le (fun x hx => (hf x hx).HasDerivAt.HasDerivWithinAt)
+    bound xs ys
 #align convex.norm_image_sub_le_of_norm_deriv_le Convex.norm_image_sub_le_of_norm_deriv_le
 
 /-- The mean value theorem on a convex set in dimension 1: if the derivative of a function is
@@ -709,8 +713,8 @@ bounded by `C` on `s`, then the function is `C`-Lipschitz on `s`.
 Version with `deriv` and `lipschitz_on_with`. -/
 theorem lipschitzOnWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
     (bound : ∀ x ∈ s, ‖deriv f x‖₊ ≤ C) (hs : Convex ℝ s) : LipschitzOnWith C f s :=
-  hs.lipschitzOnWith_of_nnnorm_has_deriv_within_le
-    (fun x hx => (hf x hx).HasDerivAt.HasDerivWithinAt) bound
+  hs.lipschitzOnWith_of_nnnorm_hasDerivWithin_le (fun x hx => (hf x hx).HasDerivAt.HasDerivWithinAt)
+    bound
 #align convex.lipschitz_on_with_of_nnnorm_deriv_le Convex.lipschitzOnWith_of_nnnorm_deriv_le
 
 /-- The mean value theorem set in dimension 1: if the derivative of a function is bounded by `C`,
@@ -1447,7 +1451,7 @@ theorem hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
     exact le_of_lt (hε H').2
   -- apply mean value theorem
   letI : NormedSpace ℝ G := RestrictScalars.normedSpace ℝ 𝕜 G
-  refine' (convex_ball _ _).norm_image_sub_le_of_norm_has_fderiv_within_le' _ hf' h.2 h.1
+  refine' (convex_ball _ _).norm_image_sub_le_of_norm_hasFDerivWithin_le' _ hf' h.2 h.1
   exact fun y hy => (hε hy).1.HasFDerivWithinAt
 #align has_strict_fderiv_at_of_has_fderiv_at_of_continuous_at hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
 
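The renamed core inequality `Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le` is proved, as the hunks above show, by pulling `f` back along the segment from `x` to `y` and applying the one-dimensional segment estimate. In outline, with `g t = x + t • (y - x)`:

$$
(f \circ g)'(t) = f'(g(t))\,(y - x),\qquad
\|(f \circ g)'(t)\| \le C\,\|y - x\|,
$$
$$
\|f(y) - f(x)\| = \|(f \circ g)(1) - (f \circ g)(0)\| \le C\,\|y - x\|.
$$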
Diff
@@ -124,7 +124,7 @@ theorem image_le_of_liminf_slope_right_lt_deriv_boundary' {f f' : ℝ → ℝ} {
     refine' ⟨z, _, hz⟩
     have := (hfz.trans hzB).le
     rwa [slope_def_field, slope_def_field, div_le_div_right (sub_pos.2 hz.1), hxB,
-      sub_le_sub_iff_right] at this
+      sub_le_sub_iff_right] at this 
 #align image_le_of_liminf_slope_right_lt_deriv_boundary' image_le_of_liminf_slope_right_lt_deriv_boundary'
 
 /-- General fencing theorem for continuous functions with an estimate on the derivative.
@@ -425,7 +425,7 @@ theorem eq_of_has_deriv_right_eq (derivf : ∀ x ∈ Ico a b, HasDerivWithinAt f
     (derivg : ∀ x ∈ Ico a b, HasDerivWithinAt g (f' x) (Ici x) x) (fcont : ContinuousOn f (Icc a b))
     (gcont : ContinuousOn g (Icc a b)) (hi : f a = g a) : ∀ y ∈ Icc a b, f y = g y :=
   by
-  simp only [← @sub_eq_zero _ _ (f _)] at hi⊢
+  simp only [← @sub_eq_zero _ _ (f _)] at hi ⊢
   exact
     hi ▸
       constant_of_has_deriv_right_zero (fcont.sub gcont) fun y hy => by
@@ -525,7 +525,7 @@ theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt (hs
   by
   obtain ⟨ε, ε0, hε⟩ : ∃ ε > 0, ball x ε ∩ s ⊆ { y | HasFDerivWithinAt f (f' y) s y ∧ ‖f' y‖₊ < K }
   exact mem_nhds_within_iff.1 (hder.and <| hcont.nnnorm.eventually (gt_mem_nhds hK))
-  rw [inter_comm] at hε
+  rw [inter_comm] at hε 
   refine' ⟨s ∩ ball x ε, inter_mem_nhdsWithin _ (ball_mem_nhds _ ε0), _⟩
   exact
     (hs.inter (convex_ball _ _)).lipschitzOnWith_of_nnnorm_has_fderiv_within_le
@@ -639,7 +639,7 @@ theorem eqOn_of_fderivWithin_eq (hs : Convex ℝ s) (hf : DifferentiableOn 𝕜
     (hf' : ∀ x ∈ s, fderivWithin 𝕜 f s x = fderivWithin 𝕜 g s x) (hx : x ∈ s) (hfgx : f x = g x) :
     s.EqOn f g := by
   intro y hy
-  suffices f x - g x = f y - g y by rwa [hfgx, sub_self, eq_comm, sub_eq_zero] at this
+  suffices f x - g x = f y - g y by rwa [hfgx, sub_self, eq_comm, sub_eq_zero] at this 
   refine' hs.is_const_of_fderiv_within_eq_zero (hf.sub hg) _ hx hy
   intro z hz
   rw [fderivWithin_sub (hs' _ hz) (hf _ hz) (hg _ hz), sub_eq_zero, hf' _ hz]
@@ -802,7 +802,7 @@ theorem exists_hasDerivAt_eq_slope : ∃ c ∈ Ioo a b, f' c = (f b - f a) / (b
       fun x hx => hasDerivAt_id x with
     ⟨c, cmem, hc⟩
   use c, cmem
-  simp only [_root_.id, Pi.one_apply, mul_one] at hc
+  simp only [_root_.id, Pi.one_apply, mul_one] at hc 
   rw [← hc, mul_div_cancel_left]
   exact ne_of_gt (sub_pos.2 hab)
 #align exists_has_deriv_at_eq_slope exists_hasDerivAt_eq_slope
@@ -1097,9 +1097,9 @@ theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : C
   by
   by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
   · apply StrictMonoOn.exists_slope_lt_deriv_aux hf hxy hf'_mono h
-  · push_neg  at h
+  · push_neg  at h 
     rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
-    obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ (a : ℝ)(H : a ∈ Ioo x w), (f w - f x) / (w - x) < deriv f a :=
+    obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ (a : ℝ) (H : a ∈ Ioo x w), (f w - f x) / (w - x) < deriv f a :=
       by
       apply StrictMonoOn.exists_slope_lt_deriv_aux _ hxw _ _
       · exact hf.mono (Icc_subset_Icc le_rfl hwy.le)
@@ -1108,7 +1108,7 @@ theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : C
         rw [← hw]
         apply ne_of_lt
         exact hf'_mono ⟨hz.1, hz.2.trans hwy⟩ ⟨hxw, hwy⟩ hz.2
-    obtain ⟨b, ⟨hwb, hby⟩, hb⟩ : ∃ (b : ℝ)(H : b ∈ Ioo w y), (f y - f w) / (y - w) < deriv f b :=
+    obtain ⟨b, ⟨hwb, hby⟩, hb⟩ : ∃ (b : ℝ) (H : b ∈ Ioo w y), (f y - f w) / (y - w) < deriv f b :=
       by
       apply StrictMonoOn.exists_slope_lt_deriv_aux _ hwy _ _
       · refine' hf.mono (Icc_subset_Icc hxw.le le_rfl)
@@ -1118,7 +1118,7 @@ theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : C
         apply ne_of_gt
         exact hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hz.1, hz.2⟩ hz.1
     refine' ⟨b, ⟨hxw.trans hwb, hby⟩, _⟩
-    simp only [div_lt_iff, hxy, hxw, hwy, sub_pos] at ha hb⊢
+    simp only [div_lt_iff, hxy, hxw, hwy, sub_pos] at ha hb ⊢
     have : deriv f a * (w - x) < deriv f b * (w - x) :=
       by
       apply mul_lt_mul _ le_rfl (sub_pos.2 hxw) _
@@ -1148,9 +1148,9 @@ theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : C
   by
   by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
   · apply StrictMonoOn.exists_deriv_lt_slope_aux hf hxy hf'_mono h
-  · push_neg  at h
+  · push_neg  at h 
     rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
-    obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ (a : ℝ)(H : a ∈ Ioo x w), deriv f a < (f w - f x) / (w - x) :=
+    obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ (a : ℝ) (H : a ∈ Ioo x w), deriv f a < (f w - f x) / (w - x) :=
       by
       apply StrictMonoOn.exists_deriv_lt_slope_aux _ hxw _ _
       · exact hf.mono (Icc_subset_Icc le_rfl hwy.le)
@@ -1159,7 +1159,7 @@ theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : C
         rw [← hw]
         apply ne_of_lt
         exact hf'_mono ⟨hz.1, hz.2.trans hwy⟩ ⟨hxw, hwy⟩ hz.2
-    obtain ⟨b, ⟨hwb, hby⟩, hb⟩ : ∃ (b : ℝ)(H : b ∈ Ioo w y), deriv f b < (f y - f w) / (y - w) :=
+    obtain ⟨b, ⟨hwb, hby⟩, hb⟩ : ∃ (b : ℝ) (H : b ∈ Ioo w y), deriv f b < (f y - f w) / (y - w) :=
       by
       apply StrictMonoOn.exists_deriv_lt_slope_aux _ hwy _ _
       · refine' hf.mono (Icc_subset_Icc hxw.le le_rfl)
@@ -1169,7 +1169,7 @@ theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : C
         apply ne_of_gt
         exact hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hz.1, hz.2⟩ hz.1
     refine' ⟨a, ⟨hxa, haw.trans hwy⟩, _⟩
-    simp only [lt_div_iff, hxy, hxw, hwy, sub_pos] at ha hb⊢
+    simp only [lt_div_iff, hxy, hxw, hwy, sub_pos] at ha hb ⊢
     have : deriv f a * (y - w) < deriv f b * (y - w) :=
       by
       apply mul_lt_mul _ le_rfl (sub_pos.2 hwy) _
@@ -1388,7 +1388,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
     intro t ht; exact ⟨t, ht, rfl⟩
   have hseg' : Icc 0 1 ⊆ g ⁻¹' s := by
     rw [← image_subset_iff]; unfold image; change ∀ _, _
-    intro z Hz; rw [mem_set_of_eq] at Hz; rcases Hz with ⟨t, Ht, hgt⟩
+    intro z Hz; rw [mem_set_of_eq] at Hz ; rcases Hz with ⟨t, Ht, hgt⟩
     rw [← hgt]; exact hs.segment_subset xs ys (hseg t Ht)
   -- derivative of pullback of f under parametrization
   have hfg :
@@ -1398,7 +1398,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
     have hg : HasDerivAt g (y - x) t :=
       by
       have := ((hasDerivAt_id t).smul_const (y - x)).const_add x
-      rwa [one_smul] at this
+      rwa [one_smul] at this 
     exact (hf (g t) <| hseg' Ht).comp_hasDerivWithinAt _ hg.has_deriv_within_at hseg'
   -- apply 1-variable mean value theorem to pullback
   have hMVT : ∃ t ∈ Ioo (0 : ℝ) 1, (f' (g t) : E → ℝ) (y - x) = (f (g 1) - f (g 0)) / (1 - 0) :=
@@ -1441,7 +1441,7 @@ theorem hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
   refine' ⟨ε, ε0, _⟩
   -- simplify formulas involving the product E × E
   rintro ⟨a, b⟩ h
-  rw [← ball_prod_same, prod_mk_mem_set_prod_eq] at h
+  rw [← ball_prod_same, prod_mk_mem_set_prod_eq] at h 
   -- exploit the choice of ε as the modulus of continuity of f'
   have hf' : ∀ x' ∈ ball x ε, ‖f' x' - f' x‖ ≤ c := by intro x' H'; rw [← dist_eq_norm];
     exact le_of_lt (hε H').2
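
`domain_mvt`, whose proof is reformatted in the hunks above, uses the segment parametrisation `g t = x + t • (y - x)` in the equality direction: pull `f` back along `g` and apply the one-variable mean value theorem to `f ∘ g` on `[0, 1]`:

$$
\exists\, t \in (0, 1),\qquad
f'(g(t))\,(y - x) = \frac{f(g(1)) - f(g(0))}{1 - 0} = f(y) - f(x),
$$

so the point produced by the lemma is `z = g t`, which lies on the segment from `x` to `y` and hence in `s`.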
Diff
@@ -76,7 +76,7 @@ variable {E : Type _} [NormedAddCommGroup E] [NormedSpace ℝ E] {F : Type _} [N
 
 open Metric Set Asymptotics ContinuousLinearMap Filter
 
-open Classical Topology NNReal
+open scoped Classical Topology NNReal
 
 /-! ### One-dimensional fencing inequalities -/
 
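The fencing lemmas of this section, such as `image_le_of_liminf_slope_right_lt_deriv_boundary'` whose proof appears in earlier hunks, encode a barrier argument. Roughly (the full Lean statement also assumes the initial comparison `f a ≤ B a`, which is not shown in these diffs):

$$
f(a) \le B(a),\qquad \bigl(f(x) = B(x) \Rightarrow f'(x) < B'(x)\bigr) \ \text{on } [a, b)
\quad\Longrightarrow\quad
f \le B \ \text{on } [a, b],
$$

proved by showing that the set `{x | f x ≤ B x} ∩ Icc a b` is closed and cannot stop strictly before `b`.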
Diff
@@ -355,8 +355,7 @@ theorem norm_image_sub_le_of_norm_deriv_right_le_segment {f' : ℝ → E} {C : 
     intro x
     simpa using (hasDerivAt_const x C).mul ((hasDerivAt_id x).sub (hasDerivAt_const x a))
   convert image_norm_le_of_norm_deriv_right_le_deriv_boundary hg hg' _ hB bound
-  simp only [g, B]
-  rw [sub_self, norm_zero, sub_self, MulZeroClass.mul_zero]
+  simp only [g, B]; rw [sub_self, norm_zero, sub_self, MulZeroClass.mul_zero]
 #align norm_image_sub_le_of_norm_deriv_right_le_segment norm_image_sub_le_of_norm_deriv_right_le_segment
 
 /-- A function on `[a, b]` with the norm of the derivative within `[a, b]`
@@ -487,13 +486,9 @@ theorem norm_image_sub_le_of_norm_has_fderiv_within_le
     by
     rw [← image_subset_iff, ← segment_eq_image']
     apply hs.segment_subset xs ys
-  have : f x = f (g 0) := by
-    simp only [g]
-    rw [zero_smul, add_zero]
+  have : f x = f (g 0) := by simp only [g]; rw [zero_smul, add_zero]
   rw [this]
-  have : f y = f (g 1) := by
-    simp only [g]
-    rw [one_smul, add_sub_cancel'_right]
+  have : f y = f (g 1) := by simp only [g]; rw [one_smul, add_sub_cancel'_right]
   rw [this]
   have D2 : ∀ t ∈ Icc (0 : ℝ) 1, HasDerivWithinAt (f ∘ g) (f' (g t) (y - x)) (Icc 0 1) t :=
     by
@@ -730,11 +725,7 @@ theorem lipschitzWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : Differentiable 𝕜
 then it is a constant function. -/
 theorem is_const_of_deriv_eq_zero (hf : Differentiable 𝕜 f) (hf' : ∀ x, deriv f x = 0) (x y : 𝕜) :
     f x = f y :=
-  is_const_of_fderiv_eq_zero hf
-    (fun z => by
-      ext
-      simp [← deriv_fderiv, hf'])
-    _ _
+  is_const_of_fderiv_eq_zero hf (fun z => by ext; simp [← deriv_fderiv, hf']) _ _
 #align is_const_of_deriv_eq_zero is_const_of_deriv_eq_zero
 
 end Convex
@@ -759,9 +750,7 @@ theorem exists_ratio_hasDerivAt_eq_ratio_slope :
     ∃ c ∈ Ioo a b, (g b - g a) * f' c = (f b - f a) * g' c :=
   by
   let h x := (g b - g a) * f x - (f b - f a) * g x
-  have hI : h a = h b := by
-    simp only [h]
-    ring
+  have hI : h a = h b := by simp only [h]; ring
   let h' x := (g b - g a) * f' x - (f b - f a) * g' x
   have hhh' : ∀ x ∈ Ioo a b, HasDerivAt h (h' x) x := fun x hx =>
     ((hff' x hx).const_mul (g b - g a)).sub ((hgg' x hx).const_mul (f b - f a))
@@ -866,9 +855,7 @@ theorem Convex.mul_sub_lt_image_sub_of_lt_deriv {D : Set ℝ} (hD : Convex ℝ D
     subset_sUnion_of_mem ⟨isOpen_Ioo, subset.trans Ioo_subset_Icc_self hxyD⟩
   obtain ⟨a, a_mem, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x)
   exact exists_deriv_eq_slope f hxy (hf.mono hxyD) (hf'.mono hxyD')
-  have : C < (f y - f x) / (y - x) := by
-    rw [← ha]
-    exact hf'_gt _ (hxyD' a_mem)
+  have : C < (f y - f x) / (y - x) := by rw [← ha]; exact hf'_gt _ (hxyD' a_mem)
   exact (lt_div_iff (sub_pos.2 hxy)).1 this
 #align convex.mul_sub_lt_image_sub_of_lt_deriv Convex.mul_sub_lt_image_sub_of_lt_deriv
 
@@ -891,16 +878,13 @@ theorem Convex.mul_sub_le_image_sub_of_le_deriv {D : Set ℝ} (hD : Convex ℝ D
     ∀ (x) (_ : x ∈ D) (y) (_ : y ∈ D), x ≤ y → C * (y - x) ≤ f y - f x :=
   by
   intro x hx y hy hxy
-  cases' eq_or_lt_of_le hxy with hxy' hxy'
-  · rw [hxy', sub_self, sub_self, MulZeroClass.mul_zero]
+  cases' eq_or_lt_of_le hxy with hxy' hxy'; · rw [hxy', sub_self, sub_self, MulZeroClass.mul_zero]
   have hxyD : Icc x y ⊆ D := hD.ord_connected.out hx hy
   have hxyD' : Ioo x y ⊆ interior D :=
     subset_sUnion_of_mem ⟨isOpen_Ioo, subset.trans Ioo_subset_Icc_self hxyD⟩
   obtain ⟨a, a_mem, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x)
   exact exists_deriv_eq_slope f hxy' (hf.mono hxyD) (hf'.mono hxyD')
-  have : C ≤ (f y - f x) / (y - x) := by
-    rw [← ha]
-    exact hf'_ge _ (hxyD' a_mem)
+  have : C ≤ (f y - f x) / (y - x) := by rw [← ha]; exact hf'_ge _ (hxyD' a_mem)
   exact (le_div_iff (sub_pos.2 hxy')).1 this
 #align convex.mul_sub_le_image_sub_of_le_deriv Convex.mul_sub_le_image_sub_of_le_deriv
 
@@ -1401,17 +1385,11 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
     by
     rw [segment_eq_image']
     simp only [mem_image, and_imp, add_right_inj]
-    intro t ht
-    exact ⟨t, ht, rfl⟩
+    intro t ht; exact ⟨t, ht, rfl⟩
   have hseg' : Icc 0 1 ⊆ g ⁻¹' s := by
-    rw [← image_subset_iff]
-    unfold image
-    change ∀ _, _
-    intro z Hz
-    rw [mem_set_of_eq] at Hz
-    rcases Hz with ⟨t, Ht, hgt⟩
-    rw [← hgt]
-    exact hs.segment_subset xs ys (hseg t Ht)
+    rw [← image_subset_iff]; unfold image; change ∀ _, _
+    intro z Hz; rw [mem_set_of_eq] at Hz; rcases Hz with ⟨t, Ht, hgt⟩
+    rw [← hgt]; exact hs.segment_subset xs ys (hseg t Ht)
   -- derivative of pullback of f under parametrization
   have hfg :
     ∀ t ∈ Icc (0 : ℝ) 1, HasDerivWithinAt (f ∘ g) ((f' (g t) : E → ℝ) (y - x)) (Icc (0 : ℝ) 1) t :=
@@ -1430,8 +1408,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
     · exact fun t Ht => (hfg t <| hIccIoo Ht).HasDerivAt (Icc_mem_nhds Ht.1 Ht.2)
   -- reinterpret on domain
   rcases hMVT with ⟨t, Ht, hMVT'⟩
-  use g t
-  refine' ⟨hseg t <| hIccIoo Ht, _⟩
+  use g t; refine' ⟨hseg t <| hIccIoo Ht, _⟩
   simp [g, hMVT']
 #align domain_mvt domain_mvt
 
@@ -1466,10 +1443,7 @@ theorem hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
   rintro ⟨a, b⟩ h
   rw [← ball_prod_same, prod_mk_mem_set_prod_eq] at h
   -- exploit the choice of ε as the modulus of continuity of f'
-  have hf' : ∀ x' ∈ ball x ε, ‖f' x' - f' x‖ ≤ c :=
-    by
-    intro x' H'
-    rw [← dist_eq_norm]
+  have hf' : ∀ x' ∈ ball x ε, ‖f' x' - f' x‖ ≤ c := by intro x' H'; rw [← dist_eq_norm];
     exact le_of_lt (hε H').2
   -- apply mean value theorem
   letI : NormedSpace ℝ G := RestrictScalars.normedSpace ℝ 𝕜 G
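
The ratio (Cauchy) mean value theorem `exists_ratio_hasDerivAt_eq_ratio_slope`, golfed in the diff above, uses the classical auxiliary function `h x = (g b - g a) * f x - (f b - f a) * g x`. Since

$$
h(a) = g(b)\,f(a) - f(b)\,g(a) = h(b),
$$

a Rolle-type argument produces `c ∈ Ioo a b` with `h' c = 0`, that is,

$$
(g(b) - g(a))\,f'(c) = (f(b) - f(a))\,g'(c).
$$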
Diff
@@ -4,10 +4,11 @@ Released under Apache 2.0 license as described in the file LICENSE.
 Authors: Sébastien Gouëzel, Yury Kudryashov
 
 ! This file was ported from Lean 3 source module analysis.calculus.mean_value
-! leanprover-community/mathlib commit 75e7fca56381d056096ce5d05e938f63a6567828
+! leanprover-community/mathlib commit 3bce8d800a6f2b8f63fe1e588fd76a9ff4adcebe
 ! Please do not edit these lines, except to modify the commit id
 ! if you have ported upstream changes.
 -/
+import Mathbin.Analysis.Calculus.Deriv.Slope
 import Mathbin.Analysis.Calculus.LocalExtr
 import Mathbin.Analysis.Convex.Slope
 import Mathbin.Analysis.Convex.Normed
Diff
@@ -4,7 +4,7 @@ Released under Apache 2.0 license as described in the file LICENSE.
 Authors: Sébastien Gouëzel, Yury Kudryashov
 
 ! This file was ported from Lean 3 source module analysis.calculus.mean_value
-! leanprover-community/mathlib commit f2ce6086713c78a7f880485f7917ea547a215982
+! leanprover-community/mathlib commit 75e7fca56381d056096ce5d05e938f63a6567828
 ! Please do not edit these lines, except to modify the commit id
 ! if you have ported upstream changes.
 -/
@@ -464,7 +464,7 @@ variable {𝕜 G : Type _} [IsROrC 𝕜] [NormedSpace 𝕜 E] [NormedAddCommGrou
 
 namespace Convex
 
-variable {f : E → G} {C : ℝ} {s : Set E} {x y : E} {f' : E → E →L[𝕜] G} {φ : E →L[𝕜] G}
+variable {f g : E → G} {C : ℝ} {s : Set E} {x y : E} {f' g' : E → E →L[𝕜] G} {φ : E →L[𝕜] G}
 
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C`, then
 the function is `C`-Lipschitz. Version with `has_fderiv_within`. -/
@@ -636,6 +636,26 @@ theorem is_const_of_fderiv_eq_zero (hf : Differentiable 𝕜 f) (hf' : ∀ x, fd
     (fun x _ => by rw [fderivWithin_univ] <;> exact hf' x) trivial trivial
 #align is_const_of_fderiv_eq_zero is_const_of_fderiv_eq_zero
 
+/-- If two functions have equal Fréchet derivatives at every point of a convex set, and are equal at
+one point in that set, then they are equal on that set. -/
+theorem eqOn_of_fderivWithin_eq (hs : Convex ℝ s) (hf : DifferentiableOn 𝕜 f s)
+    (hg : DifferentiableOn 𝕜 g s) (hs' : UniqueDiffOn 𝕜 s)
+    (hf' : ∀ x ∈ s, fderivWithin 𝕜 f s x = fderivWithin 𝕜 g s x) (hx : x ∈ s) (hfgx : f x = g x) :
+    s.EqOn f g := by
+  intro y hy
+  suffices f x - g x = f y - g y by rwa [hfgx, sub_self, eq_comm, sub_eq_zero] at this
+  refine' hs.is_const_of_fderiv_within_eq_zero (hf.sub hg) _ hx hy
+  intro z hz
+  rw [fderivWithin_sub (hs' _ hz) (hf _ hz) (hg _ hz), sub_eq_zero, hf' _ hz]
+#align convex.eq_on_of_fderiv_within_eq Convex.eqOn_of_fderivWithin_eq
+
+theorem eq_of_fderiv_eq (hf : Differentiable 𝕜 f) (hg : Differentiable 𝕜 g)
+    (hf' : ∀ x, fderiv 𝕜 f x = fderiv 𝕜 g x) (x : E) (hfgx : f x = g x) : f = g :=
+  suffices Set.univ.EqOn f g from funext fun x => this <| mem_univ x
+  convex_univ.eqOn_of_fderivWithin_eq hf.DifferentiableOn hg.DifferentiableOn uniqueDiffOn_univ
+    (fun x hx => by simpa using hf' _) (mem_univ _) hfgx
+#align eq_of_fderiv_eq eq_of_fderiv_eq
+
 end Convex
 
 namespace Convex
Diff
@@ -469,7 +469,7 @@ variable {f : E → G} {C : ℝ} {s : Set E} {x y : E} {f' : E → E →L[𝕜]
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C`, then
 the function is `C`-Lipschitz. Version with `has_fderiv_within`. -/
 theorem norm_image_sub_le_of_norm_has_fderiv_within_le
-    (hf : ∀ x ∈ s, HasFderivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖ ≤ C) (hs : Convex ℝ s)
+    (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖ ≤ C) (hs : Convex ℝ s)
     (xs : x ∈ s) (ys : y ∈ s) : ‖f y - f x‖ ≤ C * ‖y - x‖ :=
   by
   letI : NormedSpace ℝ G := RestrictScalars.normedSpace ℝ 𝕜 G
@@ -497,7 +497,7 @@ theorem norm_image_sub_le_of_norm_has_fderiv_within_le
   have D2 : ∀ t ∈ Icc (0 : ℝ) 1, HasDerivWithinAt (f ∘ g) (f' (g t) (y - x)) (Icc 0 1) t :=
     by
     intro t ht
-    have : HasFderivWithinAt f ((f' (g t)).restrictScalars ℝ) s (g t) := hf (g t) (segm ht)
+    have : HasFDerivWithinAt f ((f' (g t)).restrictScalars ℝ) s (g t) := hf (g t) (segm ht)
     exact this.comp_has_deriv_within_at _ (Dg t).HasDerivWithinAt segm
   apply norm_image_sub_le_of_norm_deriv_le_segment_01' D2
   refine' fun t ht => le_of_op_norm_le _ _ _
@@ -508,7 +508,7 @@ theorem norm_image_sub_le_of_norm_has_fderiv_within_le
 `s`, then the function is `C`-Lipschitz on `s`. Version with `has_fderiv_within` and
 `lipschitz_on_with`. -/
 theorem lipschitzOnWith_of_nnnorm_has_fderiv_within_le {C : ℝ≥0}
-    (hf : ∀ x ∈ s, HasFderivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖₊ ≤ C)
+    (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖₊ ≤ C)
     (hs : Convex ℝ s) : LipschitzOnWith C f s :=
   by
   rw [lipschitzOnWith_iff_norm_sub_le]
@@ -522,19 +522,19 @@ continuous within `s` at `x`. Then for any number `K : ℝ≥0` larger than `‖
 `K`-Lipschitz on some neighborhood of `x` within `s`. See also
 `convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at` for a version that claims
 existence of `K` instead of an explicit estimate. -/
-theorem exists_nhdsWithin_lipschitzOnWith_of_hasFderivWithinAt_of_nnnorm_lt (hs : Convex ℝ s)
-    {f : E → G} (hder : ∀ᶠ y in 𝓝[s] x, HasFderivWithinAt f (f' y) s y)
+theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt (hs : Convex ℝ s)
+    {f : E → G} (hder : ∀ᶠ y in 𝓝[s] x, HasFDerivWithinAt f (f' y) s y)
     (hcont : ContinuousWithinAt f' s x) (K : ℝ≥0) (hK : ‖f' x‖₊ < K) :
     ∃ t ∈ 𝓝[s] x, LipschitzOnWith K f t :=
   by
-  obtain ⟨ε, ε0, hε⟩ : ∃ ε > 0, ball x ε ∩ s ⊆ { y | HasFderivWithinAt f (f' y) s y ∧ ‖f' y‖₊ < K }
+  obtain ⟨ε, ε0, hε⟩ : ∃ ε > 0, ball x ε ∩ s ⊆ { y | HasFDerivWithinAt f (f' y) s y ∧ ‖f' y‖₊ < K }
   exact mem_nhds_within_iff.1 (hder.and <| hcont.nnnorm.eventually (gt_mem_nhds hK))
   rw [inter_comm] at hε
   refine' ⟨s ∩ ball x ε, inter_mem_nhdsWithin _ (ball_mem_nhds _ ε0), _⟩
   exact
     (hs.inter (convex_ball _ _)).lipschitzOnWith_of_nnnorm_has_fderiv_within_le
       (fun y hy => (hε hy).1.mono (inter_subset_left _ _)) fun y hy => (hε hy).2.le
-#align convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at_of_nnnorm_lt Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFderivWithinAt_of_nnnorm_lt
+#align convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at_of_nnnorm_lt Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt
 
 /-- Let `s` be a convex set in a real normed vector space `E`, let `f : E → G` be a function
 differentiable within `s` in a neighborhood of `x : E` with derivative `f'`. Suppose that `f'` is
@@ -542,19 +542,19 @@ continuous within `s` at `x`. Then for any number `K : ℝ≥0` larger than `‖
 on some neighborhood of `x` within `s`. See also
 `convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at_of_nnnorm_lt` for a version
 with an explicit estimate on the Lipschitz constant. -/
-theorem exists_nhdsWithin_lipschitzOnWith_of_hasFderivWithinAt (hs : Convex ℝ s) {f : E → G}
-    (hder : ∀ᶠ y in 𝓝[s] x, HasFderivWithinAt f (f' y) s y) (hcont : ContinuousWithinAt f' s x) :
+theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt (hs : Convex ℝ s) {f : E → G}
+    (hder : ∀ᶠ y in 𝓝[s] x, HasFDerivWithinAt f (f' y) s y) (hcont : ContinuousWithinAt f' s x) :
     ∃ K, ∃ t ∈ 𝓝[s] x, LipschitzOnWith K f t :=
   (exists_gt _).imp <|
-    hs.exists_nhdsWithin_lipschitzOnWith_of_hasFderivWithinAt_of_nnnorm_lt hder hcont
-#align convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFderivWithinAt
+    hs.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt hder hcont
+#align convex.exists_nhds_within_lipschitz_on_with_of_has_fderiv_within_at Convex.exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt
 
 /-- The mean value theorem on a convex set: if the derivative of a function within this set is
 bounded by `C`, then the function is `C`-Lipschitz. Version with `fderiv_within`. -/
 theorem norm_image_sub_le_of_norm_fderivWithin_le (hf : DifferentiableOn 𝕜 f s)
     (bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x‖ ≤ C * ‖y - x‖ :=
-  hs.norm_image_sub_le_of_norm_has_fderiv_within_le (fun x hx => (hf x hx).HasFderivWithinAt) bound
+  hs.norm_image_sub_le_of_norm_has_fderiv_within_le (fun x hx => (hf x hx).HasFDerivWithinAt) bound
     xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_within_le Convex.norm_image_sub_le_of_norm_fderivWithin_le
 
@@ -563,7 +563,7 @@ theorem norm_image_sub_le_of_norm_fderivWithin_le (hf : DifferentiableOn 𝕜 f
 `lipschitz_on_with`. -/
 theorem lipschitzOnWith_of_nnnorm_fderivWithin_le {C : ℝ≥0} (hf : DifferentiableOn 𝕜 f s)
     (bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x‖₊ ≤ C) (hs : Convex ℝ s) : LipschitzOnWith C f s :=
-  hs.lipschitzOnWith_of_nnnorm_has_fderiv_within_le (fun x hx => (hf x hx).HasFderivWithinAt) bound
+  hs.lipschitzOnWith_of_nnnorm_has_fderiv_within_le (fun x hx => (hf x hx).HasFDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_fderiv_within_le Convex.lipschitzOnWith_of_nnnorm_fderivWithin_le
 
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C`,
@@ -572,7 +572,7 @@ theorem norm_image_sub_le_of_norm_fderiv_le (hf : ∀ x ∈ s, DifferentiableAt
     (bound : ∀ x ∈ s, ‖fderiv 𝕜 f x‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x‖ ≤ C * ‖y - x‖ :=
   hs.norm_image_sub_le_of_norm_has_fderiv_within_le
-    (fun x hx => (hf x hx).HasFderivAt.HasFderivWithinAt) bound xs ys
+    (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_le Convex.norm_image_sub_le_of_norm_fderiv_le
 
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C` on
@@ -580,21 +580,21 @@ theorem norm_image_sub_le_of_norm_fderiv_le (hf : ∀ x ∈ s, DifferentiableAt
 theorem lipschitzOnWith_of_nnnorm_fderiv_le {C : ℝ≥0} (hf : ∀ x ∈ s, DifferentiableAt 𝕜 f x)
     (bound : ∀ x ∈ s, ‖fderiv 𝕜 f x‖₊ ≤ C) (hs : Convex ℝ s) : LipschitzOnWith C f s :=
   hs.lipschitzOnWith_of_nnnorm_has_fderiv_within_le
-    (fun x hx => (hf x hx).HasFderivAt.HasFderivWithinAt) bound
+    (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_fderiv_le Convex.lipschitzOnWith_of_nnnorm_fderiv_le
 
 /-- Variant of the mean value inequality on a convex set, using a bound on the difference between
 the derivative and a fixed linear map, rather than a bound on the derivative itself. Version with
 `has_fderiv_within`. -/
 theorem norm_image_sub_le_of_norm_has_fderiv_within_le'
-    (hf : ∀ x ∈ s, HasFderivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x - φ‖ ≤ C)
+    (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x - φ‖ ≤ C)
     (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) : ‖f y - f x - φ (y - x)‖ ≤ C * ‖y - x‖ :=
   by
   /- We subtract `φ` to define a new function `g` for which `g' = 0`, for which the previous theorem
     applies, `convex.norm_image_sub_le_of_norm_has_fderiv_within_le`. Then, we just need to glue
     together the pieces, expressing back `f` in terms of `g`. -/
   let g y := f y - φ y
-  have hg : ∀ x ∈ s, HasFderivWithinAt g (f' x - φ) s x := fun x xs =>
+  have hg : ∀ x ∈ s, HasFDerivWithinAt g (f' x - φ) s x := fun x xs =>
     (hf x xs).sub φ.has_fderiv_within_at
   calc
     ‖f y - f x - φ (y - x)‖ = ‖f y - f x - (φ y - φ x)‖ := by simp
@@ -608,7 +608,7 @@ theorem norm_image_sub_le_of_norm_has_fderiv_within_le'
 theorem norm_image_sub_le_of_norm_fderivWithin_le' (hf : DifferentiableOn 𝕜 f s)
     (bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x - φ‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x - φ (y - x)‖ ≤ C * ‖y - x‖ :=
-  hs.norm_image_sub_le_of_norm_has_fderiv_within_le' (fun x hx => (hf x hx).HasFderivWithinAt) bound
+  hs.norm_image_sub_le_of_norm_has_fderiv_within_le' (fun x hx => (hf x hx).HasFDerivWithinAt) bound
     xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_within_le' Convex.norm_image_sub_le_of_norm_fderivWithin_le'
 
@@ -617,7 +617,7 @@ theorem norm_image_sub_le_of_norm_fderiv_le' (hf : ∀ x ∈ s, DifferentiableAt
     (bound : ∀ x ∈ s, ‖fderiv 𝕜 f x - φ‖ ≤ C) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ‖f y - f x - φ (y - x)‖ ≤ C * ‖y - x‖ :=
   hs.norm_image_sub_le_of_norm_has_fderiv_within_le'
-    (fun x hx => (hf x hx).HasFderivAt.HasFderivWithinAt) bound xs ys
+    (fun x hx => (hf x hx).HasFDerivAt.HasFDerivWithinAt) bound xs ys
 #align convex.norm_image_sub_le_of_norm_fderiv_le' Convex.norm_image_sub_le_of_norm_fderiv_le'
 
 /-- If a function has zero Fréchet derivative at every point of a convex set,
@@ -647,7 +647,7 @@ bounded by `C`, then the function is `C`-Lipschitz. Version with `has_deriv_with
 theorem norm_image_sub_le_of_norm_has_deriv_within_le {C : ℝ}
     (hf : ∀ x ∈ s, HasDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖ ≤ C) (hs : Convex ℝ s)
     (xs : x ∈ s) (ys : y ∈ s) : ‖f y - f x‖ ≤ C * ‖y - x‖ :=
-  Convex.norm_image_sub_le_of_norm_has_fderiv_within_le (fun x hx => (hf x hx).HasFderivWithinAt)
+  Convex.norm_image_sub_le_of_norm_has_fderiv_within_le (fun x hx => (hf x hx).HasFDerivWithinAt)
     (fun x hx => le_trans (by simp) (bound x hx)) hs xs ys
 #align convex.norm_image_sub_le_of_norm_has_deriv_within_le Convex.norm_image_sub_le_of_norm_has_deriv_within_le
 
@@ -657,7 +657,7 @@ Version with `has_deriv_within` and `lipschitz_on_with`. -/
 theorem lipschitzOnWith_of_nnnorm_has_deriv_within_le {C : ℝ≥0} (hs : Convex ℝ s)
     (hf : ∀ x ∈ s, HasDerivWithinAt f (f' x) s x) (bound : ∀ x ∈ s, ‖f' x‖₊ ≤ C) :
     LipschitzOnWith C f s :=
-  Convex.lipschitzOnWith_of_nnnorm_has_fderiv_within_le (fun x hx => (hf x hx).HasFderivWithinAt)
+  Convex.lipschitzOnWith_of_nnnorm_has_fderiv_within_le (fun x hx => (hf x hx).HasFDerivWithinAt)
     (fun x hx => le_trans (by simp) (bound x hx)) hs
 #align convex.lipschitz_on_with_of_nnnorm_has_deriv_within_le Convex.lipschitzOnWith_of_nnnorm_has_deriv_within_le
 
@@ -1370,7 +1370,7 @@ theorem strictConcaveOn_univ_of_deriv2_neg {f : ℝ → ℝ} (hf : Continuous f)
 
 /-- Lagrange's Mean Value Theorem, applied to convex domains. -/
 theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ] ℝ}
-    (hf : ∀ x ∈ s, HasFderivWithinAt f (f' x) s x) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
+    (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
     ∃ z ∈ segment ℝ x y, f y - f x = f' z (y - x) :=
   by
   have hIccIoo := @Ioo_subset_Icc_self ℝ _ 0 1
@@ -1432,9 +1432,9 @@ variable {𝕜 : Type _} [IsROrC 𝕜] {G : Type _} [NormedAddCommGroup G] [Norm
 
 /-- Over the reals or the complexes, a continuously differentiable function is strictly
 differentiable. -/
-theorem hasStrictFderivAt_of_hasFderivAt_of_continuousAt
-    (hder : ∀ᶠ y in 𝓝 x, HasFderivAt f (f' y) y) (hcont : ContinuousAt f' x) :
-    HasStrictFderivAt f (f' x) x :=
+theorem hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
+    (hder : ∀ᶠ y in 𝓝 x, HasFDerivAt f (f' y) y) (hcont : ContinuousAt f' x) :
+    HasStrictFDerivAt f (f' x) x :=
   by
   -- turn little-o definition of strict_fderiv into an epsilon-delta statement
   refine' is_o_iff.mpr fun c hc => metric.eventually_nhds_iff_ball.mpr _
@@ -1453,15 +1453,15 @@ theorem hasStrictFderivAt_of_hasFderivAt_of_continuousAt
   -- apply mean value theorem
   letI : NormedSpace ℝ G := RestrictScalars.normedSpace ℝ 𝕜 G
   refine' (convex_ball _ _).norm_image_sub_le_of_norm_has_fderiv_within_le' _ hf' h.2 h.1
-  exact fun y hy => (hε hy).1.HasFderivWithinAt
-#align has_strict_fderiv_at_of_has_fderiv_at_of_continuous_at hasStrictFderivAt_of_hasFderivAt_of_continuousAt
+  exact fun y hy => (hε hy).1.HasFDerivWithinAt
+#align has_strict_fderiv_at_of_has_fderiv_at_of_continuous_at hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt
 
 /-- Over the reals or the complexes, a continuously differentiable function is strictly
 differentiable. -/
 theorem hasStrictDerivAt_of_hasDerivAt_of_continuousAt {f f' : 𝕜 → G} {x : 𝕜}
     (hder : ∀ᶠ y in 𝓝 x, HasDerivAt f (f' y) y) (hcont : ContinuousAt f' x) :
     HasStrictDerivAt f (f' x) x :=
-  hasStrictFderivAt_of_hasFderivAt_of_continuousAt (hder.mono fun y hy => hy.HasFderivAt) <|
+  hasStrictFDerivAt_of_hasFDerivAt_of_continuousAt (hder.mono fun y hy => hy.HasFDerivAt) <|
     (smulRightL 𝕜 𝕜 G 1).Continuous.ContinuousAt.comp hcont
 #align has_strict_deriv_at_of_has_deriv_at_of_continuous_at hasStrictDerivAt_of_hasDerivAt_of_continuousAt
 
Diff
@@ -166,7 +166,7 @@ theorem image_le_of_liminf_slope_right_le_deriv_boundary {f : ℝ → ℝ} {a b
     by
     intro x hx r hr
     apply image_le_of_liminf_slope_right_lt_deriv_boundary' hf bound
-    · rwa [sub_self, mul_zero, add_zero]
+    · rwa [sub_self, MulZeroClass.mul_zero, add_zero]
     · exact hB.add (continuous_on_const.mul (continuous_id.continuous_on.sub continuousOn_const))
     · intro x hx
       exact (hB' x hx).add (((hasDerivWithinAt_id x (Ici x)).sub_const a).const_mul r)
@@ -355,7 +355,7 @@ theorem norm_image_sub_le_of_norm_deriv_right_le_segment {f' : ℝ → E} {C : 
     simpa using (hasDerivAt_const x C).mul ((hasDerivAt_id x).sub (hasDerivAt_const x a))
   convert image_norm_le_of_norm_deriv_right_le_deriv_boundary hg hg' _ hB bound
   simp only [g, B]
-  rw [sub_self, norm_zero, sub_self, mul_zero]
+  rw [sub_self, norm_zero, sub_self, MulZeroClass.mul_zero]
 #align norm_image_sub_le_of_norm_deriv_right_le_segment norm_image_sub_le_of_norm_deriv_right_le_segment
 
 /-- A function on `[a, b]` with the norm of the derivative within `[a, b]`
@@ -403,7 +403,7 @@ theorem norm_image_sub_le_of_norm_deriv_le_segment_01 {C : ℝ}
 
 theorem constant_of_has_deriv_right_zero (hcont : ContinuousOn f (Icc a b))
     (hderiv : ∀ x ∈ Ico a b, HasDerivWithinAt f 0 (Ici x) x) : ∀ x ∈ Icc a b, f x = f a := by
-  simpa only [zero_mul, norm_le_zero_iff, sub_eq_zero] using fun x hx =>
+  simpa only [MulZeroClass.zero_mul, norm_le_zero_iff, sub_eq_zero] using fun x hx =>
     norm_image_sub_le_of_norm_deriv_right_le_segment hcont hderiv
       (fun y hy => by rw [norm_le_zero_iff]) x hx
 #align constant_of_has_deriv_right_zero constant_of_has_deriv_right_zero
@@ -413,7 +413,7 @@ theorem constant_of_derivWithin_zero (hdiff : DifferentiableOn ℝ f (Icc a b))
   by
   have H : ∀ x ∈ Ico a b, ‖derivWithin f (Icc a b) x‖ ≤ 0 := by
     simpa only [norm_le_zero_iff] using fun x hx => hderiv x hx
-  simpa only [zero_mul, norm_le_zero_iff, sub_eq_zero] using fun x hx =>
+  simpa only [MulZeroClass.zero_mul, norm_le_zero_iff, sub_eq_zero] using fun x hx =>
     norm_image_sub_le_of_norm_deriv_le_segment hdiff H x hx
 #align constant_of_deriv_within_zero constant_of_derivWithin_zero
 
@@ -626,7 +626,7 @@ theorem is_const_of_fderivWithin_eq_zero (hs : Convex ℝ s) (hf : Differentiabl
     (hf' : ∀ x ∈ s, fderivWithin 𝕜 f s x = 0) (hx : x ∈ s) (hy : y ∈ s) : f x = f y :=
   by
   have bound : ∀ x ∈ s, ‖fderivWithin 𝕜 f s x‖ ≤ 0 := fun x hx => by simp only [hf' x hx, norm_zero]
-  simpa only [(dist_eq_norm _ _).symm, zero_mul, dist_le_zero, eq_comm] using
+  simpa only [(dist_eq_norm _ _).symm, MulZeroClass.zero_mul, dist_le_zero, eq_comm] using
     hs.norm_image_sub_le_of_norm_fderiv_within_le hf bound hx hy
 #align convex.is_const_of_fderiv_within_eq_zero Convex.is_const_of_fderivWithin_eq_zero
 
@@ -871,7 +871,7 @@ theorem Convex.mul_sub_le_image_sub_of_le_deriv {D : Set ℝ} (hD : Convex ℝ D
   by
   intro x hx y hy hxy
   cases' eq_or_lt_of_le hxy with hxy' hxy'
-  · rw [hxy', sub_self, sub_self, mul_zero]
+  · rw [hxy', sub_self, sub_self, MulZeroClass.mul_zero]
   have hxyD : Icc x y ⊆ D := hD.ord_connected.out hx hy
   have hxyD' : Ioo x y ⊆ interior D :=
     subset_sUnion_of_mem ⟨isOpen_Ioo, subset.trans Ioo_subset_Icc_self hxyD⟩
@@ -956,7 +956,8 @@ theorem Convex.strictMonoOn_of_deriv_pos {D : Set ℝ} (hD : Convex ℝ D) {f :
     (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, 0 < deriv f x) : StrictMonoOn f D :=
   by
   rintro x hx y hy
-  simpa only [zero_mul, sub_pos] using hD.mul_sub_lt_image_sub_of_lt_deriv hf _ hf' x hx y hy
+  simpa only [MulZeroClass.zero_mul, sub_pos] using
+    hD.mul_sub_lt_image_sub_of_lt_deriv hf _ hf' x hx y hy
   exact fun z hz => (differentiableAt_of_deriv_ne_zero (hf' z hz).ne').DifferentiableWithinAt
 #align convex.strict_mono_on_of_deriv_pos Convex.strictMonoOn_of_deriv_pos
 
@@ -978,7 +979,7 @@ of the real line. If `f` is differentiable on the interior of `D` and `f'` is no
 theorem Convex.monotoneOn_of_deriv_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : DifferentiableOn ℝ f (interior D))
     (hf'_nonneg : ∀ x ∈ interior D, 0 ≤ deriv f x) : MonotoneOn f D := fun x hx y hy hxy => by
-  simpa only [zero_mul, sub_nonneg] using
+  simpa only [MulZeroClass.zero_mul, sub_nonneg] using
     hD.mul_sub_le_image_sub_of_le_deriv hf hf' hf'_nonneg x hx y hy hxy
 #align convex.monotone_on_of_deriv_nonneg Convex.monotoneOn_of_deriv_nonneg
 
@@ -997,7 +998,7 @@ of the real line. If `f` is differentiable on the interior of `D` and `f'` is ne
 theorem Convex.strictAntiOn_of_deriv_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, deriv f x < 0) : StrictAntiOn f D :=
   fun x hx y => by
-  simpa only [zero_mul, sub_lt_zero] using
+  simpa only [MulZeroClass.zero_mul, sub_lt_zero] using
     hD.image_sub_lt_mul_sub_of_deriv_lt hf
       (fun z hz => (differentiableAt_of_deriv_ne_zero (hf' z hz).Ne).DifferentiableWithinAt) hf' x
       hx y
@@ -1021,7 +1022,7 @@ of the real line. If `f` is differentiable on the interior of `D` and `f'` is no
 theorem Convex.antitoneOn_of_deriv_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : DifferentiableOn ℝ f (interior D))
     (hf'_nonpos : ∀ x ∈ interior D, deriv f x ≤ 0) : AntitoneOn f D := fun x hx y hy hxy => by
-  simpa only [zero_mul, sub_nonpos] using
+  simpa only [MulZeroClass.zero_mul, sub_nonpos] using
     hD.image_sub_le_mul_sub_of_deriv_le hf hf' hf'_nonpos x hx y hy hxy
 #align convex.antitone_on_of_deriv_nonpos Convex.antitoneOn_of_deriv_nonpos
 
Diff
@@ -829,7 +829,7 @@ theorem exists_deriv_eq_slope : ∃ c ∈ Ioo a b, deriv f c = (f b - f a) / (b
 
 end Interval
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:628:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C < f'`, then
 `f` grows faster than `C * x` on `D`, i.e., `C * (y - x) < f y - f x` whenever `x, y ∈ D`,
@@ -859,7 +859,7 @@ theorem mul_sub_lt_image_sub_of_lt_deriv {f : ℝ → ℝ} (hf : Differentiable
     (fun x _ => hf'_gt x) x trivial y trivial hxy
 #align mul_sub_lt_image_sub_of_lt_deriv mul_sub_lt_image_sub_of_lt_deriv
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:628:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `C ≤ f'`, then
 `f` grows at least as fast as `C * x` on `D`, i.e., `C * (y - x) ≤ f y - f x` whenever `x, y ∈ D`,
@@ -891,7 +891,7 @@ theorem mul_sub_le_image_sub_of_le_deriv {f : ℝ → ℝ} (hf : Differentiable
     (fun x _ => hf'_ge x) x trivial y trivial hxy
 #align mul_sub_le_image_sub_of_le_deriv mul_sub_le_image_sub_of_le_deriv
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:628:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' < C`, then
 `f` grows slower than `C * x` on `D`, i.e., `f y - f x < C * (y - x)` whenever `x, y ∈ D`,
@@ -919,7 +919,7 @@ theorem image_sub_lt_mul_sub_of_deriv_lt {f : ℝ → ℝ} (hf : Differentiable
     (fun x _ => lt_hf' x) x trivial y trivial hxy
 #align image_sub_lt_mul_sub_of_deriv_lt image_sub_lt_mul_sub_of_deriv_lt
 
-/- ./././Mathport/Syntax/Translate/Basic.lean:628:2: warning: expanding binder collection (x y «expr ∈ » D) -/
+/- ./././Mathport/Syntax/Translate/Basic.lean:635:2: warning: expanding binder collection (x y «expr ∈ » D) -/
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f' ≤ C`, then
 `f` grows at most as fast as `C * x` on `D`, i.e., `f y - f x ≤ C * (y - x)` whenever `x, y ∈ D`,

Changes in mathlib4

mathlib3
mathlib4
move(RCLike): Move out of Data (#11753)

RCLike is an analytic typeclass, so it should live under Analysis rather than Data.

Diff
@@ -9,7 +9,7 @@ import Mathlib.Analysis.Calculus.Deriv.Mul
 import Mathlib.Analysis.Calculus.Deriv.Comp
 import Mathlib.Analysis.Calculus.LocalExtr.Rolle
 import Mathlib.Analysis.Convex.Normed
-import Mathlib.Data.RCLike.Basic
+import Mathlib.Analysis.RCLike.Basic
 
 #align_import analysis.calculus.mean_value from "leanprover-community/mathlib"@"3bce8d800a6f2b8f63fe1e588fd76a9ff4adcebe"
 
chore: Rename IsROrC to RCLike (#10819)

IsROrC contains data, which goes against the expectation that classes prefixed with Is are prop-valued. People have been complaining about this on and off, so this PR renames IsROrC to RCLike.
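
For orientation, here is a minimal sketch of what the rename means for downstream code; it is not taken from this file, and the import path shown is the one introduced by the later #11753 move listed above.

```lean
import Mathlib.Analysis.RCLike.Basic

-- Before this PR the binder below would have read [IsROrC 𝕜]; only the name changed,
-- so a single statement over RCLike 𝕜 still covers both ℝ and ℂ at once.
variable {𝕜 : Type*} [RCLike 𝕜]

example (z : 𝕜) : (0 : ℝ) ≤ ‖z‖ := norm_nonneg z
```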

Diff
@@ -9,7 +9,7 @@ import Mathlib.Analysis.Calculus.Deriv.Mul
 import Mathlib.Analysis.Calculus.Deriv.Comp
 import Mathlib.Analysis.Calculus.LocalExtr.Rolle
 import Mathlib.Analysis.Convex.Normed
-import Mathlib.Data.IsROrC.Basic
+import Mathlib.Data.RCLike.Basic
 
 #align_import analysis.calculus.mean_value from "leanprover-community/mathlib"@"3bce8d800a6f2b8f63fe1e588fd76a9ff4adcebe"
 
@@ -21,7 +21,7 @@ In this file we prove the following facts:
 * `Convex.norm_image_sub_le_of_norm_deriv_le` : if `f` is differentiable on a convex set `s`
   and the norm of its derivative is bounded by `C`, then `f` is Lipschitz continuous on `s` with
   constant `C`; also a variant in which what is bounded by `C` is the norm of the difference of the
-  derivative from a fixed linear map. This lemma and its versions are formulated using `IsROrC`,
+  derivative from a fixed linear map. This lemma and its versions are formulated using `RCLike`,
   so they work both for real and complex derivatives.
 
 * `image_le_of*`, `image_norm_le_of_*` : several similar lemmas deducing `f x ≤ B x` or
@@ -439,12 +439,12 @@ end
 ### Vector-valued functions `f : E → G`
 
 Theorems in this section work both for real and complex differentiable functions. We use assumptions
-`[IsROrC 𝕜] [NormedSpace 𝕜 E] [NormedSpace 𝕜 G]` to achieve this result. For the domain `E` we
+`[RCLike 𝕜] [NormedSpace 𝕜 E] [NormedSpace 𝕜 G]` to achieve this result. For the domain `E` we
 also assume `[NormedSpace ℝ E]` to have a notion of a `Convex` set. -/
 
 section
 
-variable {𝕜 G : Type*} [IsROrC 𝕜] [NormedSpace 𝕜 E] [NormedAddCommGroup G] [NormedSpace 𝕜 G]
+variable {𝕜 G : Type*} [RCLike 𝕜] [NormedSpace 𝕜 E] [NormedAddCommGroup G] [NormedSpace 𝕜 G]
 
 namespace Convex
 
@@ -1080,7 +1080,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
   simpa [g] using hMVT'.symm
 #align domain_mvt domain_mvt
 
-section IsROrC
+section RCLike
 
 /-!
 ### Vector-valued functions `f : E → F`. Strict differentiability.
@@ -1092,7 +1092,7 @@ make sense and are enough. Many formulations of the mean value inequality could
 balls over `ℝ` or `ℂ`. For now, we only include the ones that we need.
 -/
 
-variable {𝕜 : Type*} [IsROrC 𝕜] {G : Type*} [NormedAddCommGroup G] [NormedSpace 𝕜 G] {H : Type*}
+variable {𝕜 : Type*} [RCLike 𝕜] {G : Type*} [NormedAddCommGroup G] [NormedSpace 𝕜 G] {H : Type*}
   [NormedAddCommGroup H] [NormedSpace 𝕜 H] {f : G → H} {f' : G → G →L[𝕜] H} {x : G}
 
 /-- Over the reals or the complexes, a continuously differentiable function is strictly
@@ -1127,4 +1127,4 @@ theorem hasStrictDerivAt_of_hasDerivAt_of_continuousAt {f f' : 𝕜 → G} {x :
     (smulRightL 𝕜 𝕜 G 1).continuous.continuousAt.comp hcont
 #align has_strict_deriv_at_of_has_deriv_at_of_continuous_at hasStrictDerivAt_of_hasDerivAt_of_continuousAt
 
-end IsROrC
+end RCLike
chore: add missing aliases for recently renamed lemmas (#11117)

From #9618, #10525 and #10614.
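
As a sketch of what these aliases buy downstream users (the signature is copied from the diff below; the import path and the exact warning text are assumptions): code written against one of the old names still compiles, elaborating to the new lemma and emitting a deprecation warning.

```lean
import Mathlib.Analysis.Calculus.MeanValue

-- strictAnti_of_hasDerivAt_pos is now a deprecated alias of strictAnti_of_hasDerivAt_neg,
-- so this still elaborates, with a warning pointing at the new name.
example {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x) (hf' : ∀ x, f' x < 0) :
    StrictAnti f :=
  strictAnti_of_hasDerivAt_pos hf hf'
```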

Diff
@@ -931,6 +931,9 @@ lemma strictMonoOn_of_hasDerivWithinAt_pos {D : Set ℝ} (hD : Convex ℝ D) {f
   strictMonoOn_of_deriv_pos hD hf fun x hx ↦ by
     rw [deriv_eqOn isOpen_interior hf' hx]; exact hf'₀ _ hx
 
+@[deprecated] -- 2024-03-02
+alias StrictMonoOn_of_hasDerivWithinAt_pos := strictMonoOn_of_hasDerivWithinAt_pos
+
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is strictly positive, then
 `f` is a strictly monotone function. -/
 lemma strictMono_of_hasDerivAt_pos {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x)
@@ -1004,12 +1007,18 @@ lemma strictAntiOn_of_hasDerivWithinAt_neg {D : Set ℝ} (hD : Convex ℝ D) {f
   strictAntiOn_of_deriv_neg hD hf fun x hx ↦ by
     rw [deriv_eqOn isOpen_interior hf' hx]; exact hf'₀ _ hx
 
+@[deprecated] -- 2024-03-02
+alias StrictAntiOn_of_hasDerivWithinAt_pos := strictAntiOn_of_hasDerivWithinAt_neg
+
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is strictly positive, then
 `f` is a strictly monotone function. -/
 lemma strictAnti_of_hasDerivAt_neg {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x)
     (hf' : ∀ x, f' x < 0) : StrictAnti f :=
   strictAnti_of_deriv_neg fun x ↦ by rw [(hf _).deriv]; exact hf' _
 
+@[deprecated] -- 2024-03-02
+alias strictAnti_of_hasDerivAt_pos := strictAnti_of_hasDerivAt_neg
+
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonpositive, then
 `f` is an antitone function on `D`. -/
refactor(Analysis/Calculus/MeanValue): move convexity material elsewhere (#11588)

This PR carves off lemmas about convex functions from Analysis/Calculus/MeanValue.lean and relocates them to a new file. (This is a preparatory step for adding some more lemmas on this topic, since MeanValue.lean is already over-long.)
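
A quick sketch checking that the relocation is invisible to downstream users: one of the moved lemmas, restated with the signature shown in the diff below. The PR text does not name the new file, so the umbrella Mathlib import is used here instead of a precise path.

```lean
import Mathlib

-- MonotoneOn.convexOn_of_deriv is still available after the move, with the same statement.
example {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
    (hf' : DifferentiableOn ℝ f (interior D))
    (hf'_mono : MonotoneOn (deriv f) (interior D)) : ConvexOn ℝ D f :=
  hf'_mono.convexOn_of_deriv hD hf hf'
```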

Diff
@@ -8,10 +8,8 @@ import Mathlib.Analysis.Calculus.Deriv.Slope
 import Mathlib.Analysis.Calculus.Deriv.Mul
 import Mathlib.Analysis.Calculus.Deriv.Comp
 import Mathlib.Analysis.Calculus.LocalExtr.Rolle
-import Mathlib.Analysis.Convex.Slope
 import Mathlib.Analysis.Convex.Normed
 import Mathlib.Data.IsROrC.Basic
-import Mathlib.Topology.Instances.RealVectorSpace
 
 #align_import analysis.calculus.mean_value from "leanprover-community/mathlib"@"3bce8d800a6f2b8f63fe1e588fd76a9ff4adcebe"
 
@@ -1047,354 +1045,8 @@ lemma antitone_of_hasDerivAt_nonpos {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt
   antitone_of_deriv_nonpos (fun x ↦ (hf _).differentiableAt) fun x ↦ by
     rw [(hf _).deriv]; exact hf' _
 
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is differentiable on its interior,
-and `f'` is monotone on the interior, then `f` is convex on `D`. -/
-theorem MonotoneOn.convexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : DifferentiableOn ℝ f (interior D))
-    (hf'_mono : MonotoneOn (deriv f) (interior D)) : ConvexOn ℝ D f :=
-  convexOn_of_slope_mono_adjacent hD
-    (by
-      intro x y z hx hz hxy hyz
-      -- First we prove some trivial inclusions
-      have hxzD : Icc x z ⊆ D := hD.ordConnected.out hx hz
-      have hxyD : Icc x y ⊆ D := (Icc_subset_Icc_right hyz.le).trans hxzD
-      have hxyD' : Ioo x y ⊆ interior D :=
-        subset_sUnion_of_mem ⟨isOpen_Ioo, Ioo_subset_Icc_self.trans hxyD⟩
-      have hyzD : Icc y z ⊆ D := (Icc_subset_Icc_left hxy.le).trans hxzD
-      have hyzD' : Ioo y z ⊆ interior D :=
-        subset_sUnion_of_mem ⟨isOpen_Ioo, Ioo_subset_Icc_self.trans hyzD⟩
-      -- Then we apply MVT to both `[x, y]` and `[y, z]`
-      obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x) :=
-        exists_deriv_eq_slope f hxy (hf.mono hxyD) (hf'.mono hxyD')
-      obtain ⟨b, ⟨hyb, hbz⟩, hb⟩ : ∃ b ∈ Ioo y z, deriv f b = (f z - f y) / (z - y) :=
-        exists_deriv_eq_slope f hyz (hf.mono hyzD) (hf'.mono hyzD')
-      rw [← ha, ← hb]
-      exact hf'_mono (hxyD' ⟨hxa, hay⟩) (hyzD' ⟨hyb, hbz⟩) (hay.trans hyb).le)
-#align monotone_on.convex_on_of_deriv MonotoneOn.convexOn_of_deriv
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is differentiable on its interior,
-and `f'` is antitone on the interior, then `f` is concave on `D`. -/
-theorem AntitoneOn.concaveOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : DifferentiableOn ℝ f (interior D))
-    (h_anti : AntitoneOn (deriv f) (interior D)) : ConcaveOn ℝ D f :=
-  haveI : MonotoneOn (deriv (-f)) (interior D) := by
-    simpa only [← deriv.neg] using h_anti.neg
-  neg_convexOn_iff.mp (this.convexOn_of_deriv hD hf.neg hf'.neg)
-#align antitone_on.concave_on_of_deriv AntitoneOn.concaveOn_of_deriv
-
-theorem StrictMonoOn.exists_slope_lt_deriv_aux {x y : ℝ} {f : ℝ → ℝ} (hf : ContinuousOn f (Icc x y))
-    (hxy : x < y) (hf'_mono : StrictMonoOn (deriv f) (Ioo x y)) (h : ∀ w ∈ Ioo x y, deriv f w ≠ 0) :
-    ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a := by
-  have A : DifferentiableOn ℝ f (Ioo x y) := fun w wmem =>
-    (differentiableAt_of_deriv_ne_zero (h w wmem)).differentiableWithinAt
-  obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x) :=
-    exists_deriv_eq_slope f hxy hf A
-  rcases nonempty_Ioo.2 hay with ⟨b, ⟨hab, hby⟩⟩
-  refine' ⟨b, ⟨hxa.trans hab, hby⟩, _⟩
-  rw [← ha]
-  exact hf'_mono ⟨hxa, hay⟩ ⟨hxa.trans hab, hby⟩ hab
-#align strict_mono_on.exists_slope_lt_deriv_aux StrictMonoOn.exists_slope_lt_deriv_aux
-
-theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : ContinuousOn f (Icc x y))
-    (hxy : x < y) (hf'_mono : StrictMonoOn (deriv f) (Ioo x y)) :
-    ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a := by
-  by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
-  · apply StrictMonoOn.exists_slope_lt_deriv_aux hf hxy hf'_mono h
-  · push_neg at h
-    rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
-    obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ a ∈ Ioo x w, (f w - f x) / (w - x) < deriv f a := by
-      apply StrictMonoOn.exists_slope_lt_deriv_aux _ hxw _ _
-      · exact hf.mono (Icc_subset_Icc le_rfl hwy.le)
-      · exact hf'_mono.mono (Ioo_subset_Ioo le_rfl hwy.le)
-      · intro z hz
-        rw [← hw]
-        apply ne_of_lt
-        exact hf'_mono ⟨hz.1, hz.2.trans hwy⟩ ⟨hxw, hwy⟩ hz.2
-    obtain ⟨b, ⟨hwb, hby⟩, hb⟩ : ∃ b ∈ Ioo w y, (f y - f w) / (y - w) < deriv f b := by
-      apply StrictMonoOn.exists_slope_lt_deriv_aux _ hwy _ _
-      · refine' hf.mono (Icc_subset_Icc hxw.le le_rfl)
-      · exact hf'_mono.mono (Ioo_subset_Ioo hxw.le le_rfl)
-      · intro z hz
-        rw [← hw]
-        apply ne_of_gt
-        exact hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hz.1, hz.2⟩ hz.1
-    refine' ⟨b, ⟨hxw.trans hwb, hby⟩, _⟩
-    simp only [div_lt_iff, hxy, hxw, hwy, sub_pos] at ha hb ⊢
-    have : deriv f a * (w - x) < deriv f b * (w - x) := by
-      apply mul_lt_mul _ le_rfl (sub_pos.2 hxw) _
-      · exact hf'_mono ⟨hxa, haw.trans hwy⟩ ⟨hxw.trans hwb, hby⟩ (haw.trans hwb)
-      · rw [← hw]
-        exact (hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hwb, hby⟩ hwb).le
-    linarith
-#align strict_mono_on.exists_slope_lt_deriv StrictMonoOn.exists_slope_lt_deriv
-
-theorem StrictMonoOn.exists_deriv_lt_slope_aux {x y : ℝ} {f : ℝ → ℝ} (hf : ContinuousOn f (Icc x y))
-    (hxy : x < y) (hf'_mono : StrictMonoOn (deriv f) (Ioo x y)) (h : ∀ w ∈ Ioo x y, deriv f w ≠ 0) :
-    ∃ a ∈ Ioo x y, deriv f a < (f y - f x) / (y - x) := by
-  have A : DifferentiableOn ℝ f (Ioo x y) := fun w wmem =>
-    (differentiableAt_of_deriv_ne_zero (h w wmem)).differentiableWithinAt
-  obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x) :=
-    exists_deriv_eq_slope f hxy hf A
-  rcases nonempty_Ioo.2 hxa with ⟨b, ⟨hxb, hba⟩⟩
-  refine' ⟨b, ⟨hxb, hba.trans hay⟩, _⟩
-  rw [← ha]
-  exact hf'_mono ⟨hxb, hba.trans hay⟩ ⟨hxa, hay⟩ hba
-#align strict_mono_on.exists_deriv_lt_slope_aux StrictMonoOn.exists_deriv_lt_slope_aux
-
-theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : ContinuousOn f (Icc x y))
-    (hxy : x < y) (hf'_mono : StrictMonoOn (deriv f) (Ioo x y)) :
-    ∃ a ∈ Ioo x y, deriv f a < (f y - f x) / (y - x) := by
-  by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
-  · apply StrictMonoOn.exists_deriv_lt_slope_aux hf hxy hf'_mono h
-  · push_neg at h
-    rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
-    obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ a ∈ Ioo x w, deriv f a < (f w - f x) / (w - x) := by
-      apply StrictMonoOn.exists_deriv_lt_slope_aux _ hxw _ _
-      · exact hf.mono (Icc_subset_Icc le_rfl hwy.le)
-      · exact hf'_mono.mono (Ioo_subset_Ioo le_rfl hwy.le)
-      · intro z hz
-        rw [← hw]
-        apply ne_of_lt
-        exact hf'_mono ⟨hz.1, hz.2.trans hwy⟩ ⟨hxw, hwy⟩ hz.2
-    obtain ⟨b, ⟨hwb, hby⟩, hb⟩ : ∃ b ∈ Ioo w y, deriv f b < (f y - f w) / (y - w) := by
-      apply StrictMonoOn.exists_deriv_lt_slope_aux _ hwy _ _
-      · refine' hf.mono (Icc_subset_Icc hxw.le le_rfl)
-      · exact hf'_mono.mono (Ioo_subset_Ioo hxw.le le_rfl)
-      · intro z hz
-        rw [← hw]
-        apply ne_of_gt
-        exact hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hz.1, hz.2⟩ hz.1
-    refine' ⟨a, ⟨hxa, haw.trans hwy⟩, _⟩
-    simp only [lt_div_iff, hxy, hxw, hwy, sub_pos] at ha hb ⊢
-    have : deriv f a * (y - w) < deriv f b * (y - w) := by
-      apply mul_lt_mul _ le_rfl (sub_pos.2 hwy) _
-      · exact hf'_mono ⟨hxa, haw.trans hwy⟩ ⟨hxw.trans hwb, hby⟩ (haw.trans hwb)
-      · rw [← hw]
-        exact (hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hwb, hby⟩ hwb).le
-    linarith
-#align strict_mono_on.exists_deriv_lt_slope StrictMonoOn.exists_deriv_lt_slope
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, and `f'` is strictly monotone on the
-interior, then `f` is strictly convex on `D`.
-Note that we don't require differentiability, since it is guaranteed at all but at most
-one point by the strict monotonicity of `f'`. -/
-theorem StrictMonoOn.strictConvexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : StrictMonoOn (deriv f) (interior D)) : StrictConvexOn ℝ D f :=
-  strictConvexOn_of_slope_strict_mono_adjacent hD fun {x y z} hx hz hxy hyz => by
-    -- First we prove some trivial inclusions
-    have hxzD : Icc x z ⊆ D := hD.ordConnected.out hx hz
-    have hxyD : Icc x y ⊆ D := (Icc_subset_Icc_right hyz.le).trans hxzD
-    have hxyD' : Ioo x y ⊆ interior D :=
-      subset_sUnion_of_mem ⟨isOpen_Ioo, Ioo_subset_Icc_self.trans hxyD⟩
-    have hyzD : Icc y z ⊆ D := (Icc_subset_Icc_left hxy.le).trans hxzD
-    have hyzD' : Ioo y z ⊆ interior D :=
-      subset_sUnion_of_mem ⟨isOpen_Ioo, Ioo_subset_Icc_self.trans hyzD⟩
-    -- Then we get points `a` and `b` in each interval `[x, y]` and `[y, z]` where the derivatives
-    -- can be compared to the slopes between `x, y` and `y, z` respectively.
-    obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a :=
-      StrictMonoOn.exists_slope_lt_deriv (hf.mono hxyD) hxy (hf'.mono hxyD')
-    obtain ⟨b, ⟨hyb, hbz⟩, hb⟩ : ∃ b ∈ Ioo y z, deriv f b < (f z - f y) / (z - y) :=
-      StrictMonoOn.exists_deriv_lt_slope (hf.mono hyzD) hyz (hf'.mono hyzD')
-    apply ha.trans (lt_trans _ hb)
-    exact hf' (hxyD' ⟨hxa, hay⟩) (hyzD' ⟨hyb, hbz⟩) (hay.trans hyb)
-#align strict_mono_on.strict_convex_on_of_deriv StrictMonoOn.strictConvexOn_of_deriv
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f'` is strictly antitone on the
-interior, then `f` is strictly concave on `D`.
-Note that we don't require differentiability, since it is guaranteed at all but at most
-one point by the strict antitonicity of `f'`. -/
-theorem StrictAntiOn.strictConcaveOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (h_anti : StrictAntiOn (deriv f) (interior D)) :
-    StrictConcaveOn ℝ D f :=
-  have : StrictMonoOn (deriv (-f)) (interior D) := by simpa only [← deriv.neg] using h_anti.neg
-  neg_neg f ▸ (this.strictConvexOn_of_deriv hD hf.neg).neg
-#align strict_anti_on.strict_concave_on_of_deriv StrictAntiOn.strictConcaveOn_of_deriv
-
-/-- If a function `f` is differentiable and `f'` is monotone on `ℝ` then `f` is convex. -/
-theorem Monotone.convexOn_univ_of_deriv {f : ℝ → ℝ} (hf : Differentiable ℝ f)
-    (hf'_mono : Monotone (deriv f)) : ConvexOn ℝ univ f :=
-  (hf'_mono.monotoneOn _).convexOn_of_deriv convex_univ hf.continuous.continuousOn
-    hf.differentiableOn
-#align monotone.convex_on_univ_of_deriv Monotone.convexOn_univ_of_deriv
-
-/-- If a function `f` is differentiable and `f'` is antitone on `ℝ` then `f` is concave. -/
-theorem Antitone.concaveOn_univ_of_deriv {f : ℝ → ℝ} (hf : Differentiable ℝ f)
-    (hf'_anti : Antitone (deriv f)) : ConcaveOn ℝ univ f :=
-  (hf'_anti.antitoneOn _).concaveOn_of_deriv convex_univ hf.continuous.continuousOn
-    hf.differentiableOn
-#align antitone.concave_on_univ_of_deriv Antitone.concaveOn_univ_of_deriv
-
-/-- If a function `f` is continuous and `f'` is strictly monotone on `ℝ` then `f` is strictly
-convex. Note that we don't require differentiability, since it is guaranteed at all but at most
-one point by the strict monotonicity of `f'`. -/
-theorem StrictMono.strictConvexOn_univ_of_deriv {f : ℝ → ℝ} (hf : Continuous f)
-    (hf'_mono : StrictMono (deriv f)) : StrictConvexOn ℝ univ f :=
-  (hf'_mono.strictMonoOn _).strictConvexOn_of_deriv convex_univ hf.continuousOn
-#align strict_mono.strict_convex_on_univ_of_deriv StrictMono.strictConvexOn_univ_of_deriv
-
-/-- If a function `f` is continuous and `f'` is strictly antitone on `ℝ` then `f` is strictly
-concave. Note that we don't require differentiability, since it is guaranteed at all but at most
-one point by the strict antitonicity of `f'`. -/
-theorem StrictAnti.strictConcaveOn_univ_of_deriv {f : ℝ → ℝ} (hf : Continuous f)
-    (hf'_anti : StrictAnti (deriv f)) : StrictConcaveOn ℝ univ f :=
-  (hf'_anti.strictAntiOn _).strictConcaveOn_of_deriv convex_univ hf.continuousOn
-#align strict_anti.strict_concave_on_univ_of_deriv StrictAnti.strictConcaveOn_univ_of_deriv
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is twice differentiable on its
-interior, and `f''` is nonnegative on the interior, then `f` is convex on `D`. -/
-theorem convexOn_of_deriv2_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
-    (hf' : DifferentiableOn ℝ f (interior D)) (hf'' : DifferentiableOn ℝ (deriv f) (interior D))
-    (hf''_nonneg : ∀ x ∈ interior D, 0 ≤ deriv^[2] f x) : ConvexOn ℝ D f :=
-  (monotoneOn_of_deriv_nonneg hD.interior hf''.continuousOn (by rwa [interior_interior]) <| by
-        rwa [interior_interior]).convexOn_of_deriv
-    hD hf hf'
-#align convex_on_of_deriv2_nonneg convexOn_of_deriv2_nonneg
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is twice differentiable on its
-interior, and `f''` is nonpositive on the interior, then `f` is concave on `D`. -/
-theorem concaveOn_of_deriv2_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
-    (hf' : DifferentiableOn ℝ f (interior D)) (hf'' : DifferentiableOn ℝ (deriv f) (interior D))
-    (hf''_nonpos : ∀ x ∈ interior D, deriv^[2] f x ≤ 0) : ConcaveOn ℝ D f :=
-  (antitoneOn_of_deriv_nonpos hD.interior hf''.continuousOn (by rwa [interior_interior]) <| by
-        rwa [interior_interior]).concaveOn_of_deriv
-    hD hf hf'
-#align concave_on_of_deriv2_nonpos concaveOn_of_deriv2_nonpos
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is twice differentiable on its
-interior, and `f''` is nonnegative on the interior, then `f` is convex on `D`. -/
-lemma convexOn_of_hasDerivWithinAt2_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f f' f'' : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
-    (hf'' : ∀ x ∈ interior D, HasDerivWithinAt f' (f'' x) (interior D) x)
-    (hf''₀ : ∀ x ∈ interior D, 0 ≤ f'' x) : ConvexOn ℝ D f := by
-  have : (interior D).EqOn (deriv f) f' := deriv_eqOn isOpen_interior hf'
-  refine convexOn_of_deriv2_nonneg hD hf (fun x hx ↦ (hf' _ hx).differentiableWithinAt) ?_ ?_
-  · rw [differentiableOn_congr this]
-    exact fun x hx ↦ (hf'' _ hx).differentiableWithinAt
-  · rintro x hx
-    convert hf''₀ _ hx using 1
-    dsimp
-    rw [deriv_eqOn isOpen_interior (fun y hy ↦ ?_) hx]
-    exact (hf'' _ hy).congr this $ by rw [this hy]
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is twice differentiable on its
-interior, and `f''` is nonpositive on the interior, then `f` is concave on `D`. -/
-lemma concaveOn_of_hasDerivWithinAt2_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f f' f'' : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
-    (hf'' : ∀ x ∈ interior D, HasDerivWithinAt f' (f'' x) (interior D) x)
-    (hf''₀ : ∀ x ∈ interior D, f'' x ≤ 0) : ConcaveOn ℝ D f := by
-  have : (interior D).EqOn (deriv f) f' := deriv_eqOn isOpen_interior hf'
-  refine concaveOn_of_deriv2_nonpos hD hf (fun x hx ↦ (hf' _ hx).differentiableWithinAt) ?_ ?_
-  · rw [differentiableOn_congr this]
-    exact fun x hx ↦ (hf'' _ hx).differentiableWithinAt
-  · rintro x hx
-    convert hf''₀ _ hx using 1
-    dsimp
-    rw [deriv_eqOn isOpen_interior (fun y hy ↦ ?_) hx]
-    exact (hf'' _ hy).congr this $ by rw [this hy]
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly positive on the
-interior, then `f` is strictly convex on `D`.
-Note that we don't require twice differentiability explicitly as it is already implied by the second
-derivative being strictly positive, except at at most one point. -/
-theorem strictConvexOn_of_deriv2_pos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ interior D, 0 < (deriv^[2] f) x) :
-    StrictConvexOn ℝ D f :=
-  ((strictMonoOn_of_deriv_pos hD.interior fun z hz =>
-          (differentiableAt_of_deriv_ne_zero
-                (hf'' z hz).ne').differentiableWithinAt.continuousWithinAt) <|
-        by rwa [interior_interior]).strictConvexOn_of_deriv
-    hD hf
-#align strict_convex_on_of_deriv2_pos strictConvexOn_of_deriv2_pos
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly negative on the
-interior, then `f` is strictly concave on `D`.
-Note that we don't require twice differentiability explicitly as it already implied by the second
-derivative being strictly negative, except at at most one point. -/
-theorem strictConcaveOn_of_deriv2_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ interior D, deriv^[2] f x < 0) :
-    StrictConcaveOn ℝ D f :=
-  ((strictAntiOn_of_deriv_neg hD.interior fun z hz =>
-          (differentiableAt_of_deriv_ne_zero
-                (hf'' z hz).ne).differentiableWithinAt.continuousWithinAt) <|
-        by rwa [interior_interior]).strictConcaveOn_of_deriv
-    hD hf
-#align strict_concave_on_of_deriv2_neg strictConcaveOn_of_deriv2_neg
-
-/-- If a function `f` is twice differentiable on an open convex set `D ⊆ ℝ` and
-`f''` is nonnegative on `D`, then `f` is convex on `D`. -/
-theorem convexOn_of_deriv2_nonneg' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf' : DifferentiableOn ℝ f D) (hf'' : DifferentiableOn ℝ (deriv f) D)
-    (hf''_nonneg : ∀ x ∈ D, 0 ≤ (deriv^[2] f) x) : ConvexOn ℝ D f :=
-  convexOn_of_deriv2_nonneg hD hf'.continuousOn (hf'.mono interior_subset)
-    (hf''.mono interior_subset) fun x hx => hf''_nonneg x (interior_subset hx)
-#align convex_on_of_deriv2_nonneg' convexOn_of_deriv2_nonneg'
-
-/-- If a function `f` is twice differentiable on an open convex set `D ⊆ ℝ` and
-`f''` is nonpositive on `D`, then `f` is concave on `D`. -/
-theorem concaveOn_of_deriv2_nonpos' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf' : DifferentiableOn ℝ f D) (hf'' : DifferentiableOn ℝ (deriv f) D)
-    (hf''_nonpos : ∀ x ∈ D, deriv^[2] f x ≤ 0) : ConcaveOn ℝ D f :=
-  concaveOn_of_deriv2_nonpos hD hf'.continuousOn (hf'.mono interior_subset)
-    (hf''.mono interior_subset) fun x hx => hf''_nonpos x (interior_subset hx)
-#align concave_on_of_deriv2_nonpos' concaveOn_of_deriv2_nonpos'
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly positive on `D`,
-then `f` is strictly convex on `D`.
-Note that we don't require twice differentiability explicitly as it is already implied by the second
-derivative being strictly positive, except at at most one point. -/
-theorem strictConvexOn_of_deriv2_pos' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ D, 0 < (deriv^[2] f) x) : StrictConvexOn ℝ D f :=
-  strictConvexOn_of_deriv2_pos hD hf fun x hx => hf'' x (interior_subset hx)
-#align strict_convex_on_of_deriv2_pos' strictConvexOn_of_deriv2_pos'
-
-/-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly negative on `D`,
-then `f` is strictly concave on `D`.
-Note that we don't require twice differentiability explicitly as it is already implied by the second
-derivative being strictly negative, except at at most one point. -/
-theorem strictConcaveOn_of_deriv2_neg' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ D, deriv^[2] f x < 0) : StrictConcaveOn ℝ D f :=
-  strictConcaveOn_of_deriv2_neg hD hf fun x hx => hf'' x (interior_subset hx)
-#align strict_concave_on_of_deriv2_neg' strictConcaveOn_of_deriv2_neg'
-
-/-- If a function `f` is twice differentiable on `ℝ`, and `f''` is nonnegative on `ℝ`,
-then `f` is convex on `ℝ`. -/
-theorem convexOn_univ_of_deriv2_nonneg {f : ℝ → ℝ} (hf' : Differentiable ℝ f)
-    (hf'' : Differentiable ℝ (deriv f)) (hf''_nonneg : ∀ x, 0 ≤ (deriv^[2] f) x) :
-    ConvexOn ℝ univ f :=
-  convexOn_of_deriv2_nonneg' convex_univ hf'.differentiableOn hf''.differentiableOn fun x _ =>
-    hf''_nonneg x
-#align convex_on_univ_of_deriv2_nonneg convexOn_univ_of_deriv2_nonneg
-
-/-- If a function `f` is twice differentiable on `ℝ`, and `f''` is nonpositive on `ℝ`,
-then `f` is concave on `ℝ`. -/
-theorem concaveOn_univ_of_deriv2_nonpos {f : ℝ → ℝ} (hf' : Differentiable ℝ f)
-    (hf'' : Differentiable ℝ (deriv f)) (hf''_nonpos : ∀ x, deriv^[2] f x ≤ 0) :
-    ConcaveOn ℝ univ f :=
-  concaveOn_of_deriv2_nonpos' convex_univ hf'.differentiableOn hf''.differentiableOn fun x _ =>
-    hf''_nonpos x
-#align concave_on_univ_of_deriv2_nonpos concaveOn_univ_of_deriv2_nonpos
-
-/-- If a function `f` is continuous on `ℝ`, and `f''` is strictly positive on `ℝ`,
-then `f` is strictly convex on `ℝ`.
-Note that we don't require twice differentiability explicitly as it is already implied by the second
-derivative being strictly positive, except at at most one point. -/
-theorem strictConvexOn_univ_of_deriv2_pos {f : ℝ → ℝ} (hf : Continuous f)
-    (hf'' : ∀ x, 0 < (deriv^[2] f) x) : StrictConvexOn ℝ univ f :=
-  strictConvexOn_of_deriv2_pos' convex_univ hf.continuousOn fun x _ => hf'' x
-#align strict_convex_on_univ_of_deriv2_pos strictConvexOn_univ_of_deriv2_pos
-
-/-- If a function `f` is continuous on `ℝ`, and `f''` is strictly negative on `ℝ`,
-then `f` is strictly concave on `ℝ`.
-Note that we don't require twice differentiability explicitly as it is already implied by the second
-derivative being strictly negative, except at at most one point. -/
-theorem strictConcaveOn_univ_of_deriv2_neg {f : ℝ → ℝ} (hf : Continuous f)
-    (hf'' : ∀ x, deriv^[2] f x < 0) : StrictConcaveOn ℝ univ f :=
-  strictConcaveOn_of_deriv2_neg' convex_univ hf.continuousOn fun x _ => hf'' x
-#align strict_concave_on_univ_of_deriv2_neg strictConcaveOn_univ_of_deriv2_neg
-
 /-! ### Functions `f : E → ℝ` -/
 
-
 /-- Lagrange's **Mean Value Theorem**, applied to convex domains. -/
 theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ] ℝ}
     (hf : ∀ x ∈ s, HasFDerivWithinAt f (f' x) s x) (hs : Convex ℝ s) (xs : x ∈ s) (ys : y ∈ s) :
chore: Remove ball and bex from lemma names (#10816)

ball (short for "bounded forall") and bex (short for "bounded exists") have proved in practice to be very confusing abbreviations. This PR renames them to forall_mem and exists_mem in the few Set lemma names that mention them.

It also deprecates ball_image_of_ball, mem_image_elim and mem_image_elim_on: these lemmas duplicate the renamed ones (apart from argument order and implicitness, which this PR also adjusts by making the binder in the RHS of forall_mem_image semi-implicit), have obscure names and are completely unused.
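
A minimal sketch of the renaming from a user's point of view; the statement shape of exists_mem_image and the import path are recalled from memory rather than taken from this diff.

```lean
import Mathlib.Data.Set.Image

open Set

-- Formerly bex_image_iff; after this PR the lemma is called exists_mem_image.
example {α β : Type*} {f : α → β} {s : Set α} {p : β → Prop} :
    (∃ y ∈ f '' s, p y) ↔ ∃ x ∈ s, p (f x) :=
  exists_mem_image
```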

Diff
@@ -1414,7 +1414,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
     · exact fun t Ht => (hfg t <| hsub Ht).hasDerivAt (Icc_mem_nhds Ht.1 Ht.2)
   -- reinterpret on domain
   rcases hMVT with ⟨t, Ht, hMVT'⟩
-  rw [segment_eq_image_lineMap, bex_image_iff]
+  rw [segment_eq_image_lineMap, exists_mem_image]
   refine ⟨t, hsub Ht, ?_⟩
   simpa [g] using hMVT'.symm
 #align domain_mvt domain_mvt
feat: Linear and quadratic bounds on sin and cos (#10525)

... including Jordan's inequality: 2 / π * x ≤ sin x for 0 ≤ x ≤ π / 2

From LeanAPAP

Diff
@@ -927,7 +927,7 @@ theorem strictMono_of_deriv_pos {f : ℝ → ℝ} (hf' : ∀ x, 0 < deriv f x) :
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is strictly positive,
 then `f` is a strictly monotone function on `D`. -/
-lemma StrictMonoOn_of_hasDerivWithinAt_pos {D : Set ℝ} (hD : Convex ℝ D) {f f' : ℝ → ℝ}
+lemma strictMonoOn_of_hasDerivWithinAt_pos {D : Set ℝ} (hD : Convex ℝ D) {f f' : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
     (hf'₀ : ∀ x ∈ interior D, 0 < f' x) : StrictMonoOn f D :=
   strictMonoOn_of_deriv_pos hD hf fun x hx ↦ by
@@ -1000,7 +1000,7 @@ theorem strictAnti_of_deriv_neg {f : ℝ → ℝ} (hf' : ∀ x, deriv f x < 0) :
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is strictly positive,
 then `f` is a strictly monotone function on `D`. -/
-lemma StrictAntiOn_of_hasDerivWithinAt_pos {D : Set ℝ} (hD : Convex ℝ D) {f f' : ℝ → ℝ}
+lemma strictAntiOn_of_hasDerivWithinAt_neg {D : Set ℝ} (hD : Convex ℝ D) {f f' : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
     (hf'₀ : ∀ x ∈ interior D, f' x < 0) : StrictAntiOn f D :=
   strictAntiOn_of_deriv_neg hD hf fun x hx ↦ by
@@ -1008,7 +1008,7 @@ lemma StrictAntiOn_of_hasDerivWithinAt_pos {D : Set ℝ} (hD : Convex ℝ D) {f
 
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is strictly positive, then
 `f` is a strictly monotone function. -/
-lemma strictAnti_of_hasDerivAt_pos {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x)
+lemma strictAnti_of_hasDerivAt_neg {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x)
     (hf' : ∀ x, f' x < 0) : StrictAnti f :=
   strictAnti_of_deriv_neg fun x ↦ by rw [(hf _).deriv]; exact hf' _
 
chore: remove stream-of-consciousness syntax for obtain (#11045)

This covers many instances, but is not exhaustive.

Independently of whether that syntax should be avoided (similar to #10534), I think all these changes are small improvements.
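
Schematically, the change looks as follows (a toy goal, not code from this file): the anonymous-goal form of obtain, where the proof arrives on a separate line, becomes a single obtain ... := line.

```lean
import Mathlib.Tactic.Common

example (h : ∃ n : ℕ, 0 < n) : ∃ n : ℕ, 0 < n := by
  -- Old "stream-of-consciousness" style, now removed:
  --   obtain ⟨n, hn⟩ : ∃ n : ℕ, 0 < n
  --   exact h
  -- New style, giving the statement and its proof together:
  obtain ⟨n, hn⟩ : ∃ n : ℕ, 0 < n := h
  exact ⟨n, hn⟩
```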

Diff
@@ -118,8 +118,8 @@ theorem image_le_of_liminf_slope_right_lt_deriv_boundary' {f f' : ℝ → ℝ} {
     have HB : ∀ᶠ z in 𝓝[>] x, r < slope B x z :=
       (hasDerivWithinAt_iff_tendsto_slope' <| lt_irrefl x).1 (hB' x xab).Ioi_of_Ici
         (Ioi_mem_nhds hrB)
-    obtain ⟨z, hfz, hzB, hz⟩ : ∃ z, slope f x z < r ∧ r < slope B x z ∧ z ∈ Ioc x y
-    exact (hf'.and_eventually (HB.and (Ioc_mem_nhdsWithin_Ioi ⟨le_rfl, hy⟩))).exists
+    obtain ⟨z, hfz, hzB, hz⟩ : ∃ z, slope f x z < r ∧ r < slope B x z ∧ z ∈ Ioc x y :=
+      (hf'.and_eventually (HB.and (Ioc_mem_nhdsWithin_Ioi ⟨le_rfl, hy⟩))).exists
     refine' ⟨z, _, hz⟩
     have := (hfz.trans hzB).le
     rwa [slope_def_field, slope_def_field, div_le_div_right (sub_pos.2 hz.1), hxB,
@@ -494,8 +494,9 @@ theorem exists_nhdsWithin_lipschitzOnWith_of_hasFDerivWithinAt_of_nnnorm_lt (hs
     {f : E → G} (hder : ∀ᶠ y in 𝓝[s] x, HasFDerivWithinAt f (f' y) s y)
     (hcont : ContinuousWithinAt f' s x) (K : ℝ≥0) (hK : ‖f' x‖₊ < K) :
     ∃ t ∈ 𝓝[s] x, LipschitzOnWith K f t := by
-  obtain ⟨ε, ε0, hε⟩ : ∃ ε > 0, ball x ε ∩ s ⊆ { y | HasFDerivWithinAt f (f' y) s y ∧ ‖f' y‖₊ < K }
-  exact mem_nhdsWithin_iff.1 (hder.and <| hcont.nnnorm.eventually (gt_mem_nhds hK))
+  obtain ⟨ε, ε0, hε⟩ : ∃ ε > 0,
+      ball x ε ∩ s ⊆ { y | HasFDerivWithinAt f (f' y) s y ∧ ‖f' y‖₊ < K } :=
+    mem_nhdsWithin_iff.1 (hder.and <| hcont.nnnorm.eventually (gt_mem_nhds hK))
   rw [inter_comm] at hε
   refine' ⟨s ∩ ball x ε, inter_mem_nhdsWithin _ (ball_mem_nhds _ ε0), _⟩
   exact
@@ -841,8 +842,8 @@ theorem Convex.mul_sub_le_image_sub_of_le_deriv {D : Set ℝ} (hD : Convex ℝ D
   have hxyD : Icc x y ⊆ D := hD.ordConnected.out hx hy
   have hxyD' : Ioo x y ⊆ interior D :=
     subset_sUnion_of_mem ⟨isOpen_Ioo, Ioo_subset_Icc_self.trans hxyD⟩
-  obtain ⟨a, a_mem, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x)
-  exact exists_deriv_eq_slope f hxy' (hf.mono hxyD) (hf'.mono hxyD')
+  obtain ⟨a, a_mem, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x) :=
+    exists_deriv_eq_slope f hxy' (hf.mono hxyD) (hf'.mono hxyD')
   have : C ≤ (f y - f x) / (y - x) := ha ▸ hf'_ge _ (hxyD' a_mem)
   exact (le_div_iff (sub_pos.2 hxy')).1 this
 #align convex.mul_sub_le_image_sub_of_le_deriv Convex.mul_sub_le_image_sub_of_le_deriv
@@ -1063,10 +1064,10 @@ theorem MonotoneOn.convexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ
       have hyzD' : Ioo y z ⊆ interior D :=
         subset_sUnion_of_mem ⟨isOpen_Ioo, Ioo_subset_Icc_self.trans hyzD⟩
       -- Then we apply MVT to both `[x, y]` and `[y, z]`
-      obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x)
-      exact exists_deriv_eq_slope f hxy (hf.mono hxyD) (hf'.mono hxyD')
-      obtain ⟨b, ⟨hyb, hbz⟩, hb⟩ : ∃ b ∈ Ioo y z, deriv f b = (f z - f y) / (z - y)
-      exact exists_deriv_eq_slope f hyz (hf.mono hyzD) (hf'.mono hyzD')
+      obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x) :=
+        exists_deriv_eq_slope f hxy (hf.mono hxyD) (hf'.mono hxyD')
+      obtain ⟨b, ⟨hyb, hbz⟩, hb⟩ : ∃ b ∈ Ioo y z, deriv f b = (f z - f y) / (z - y) :=
+        exists_deriv_eq_slope f hyz (hf.mono hyzD) (hf'.mono hyzD')
       rw [← ha, ← hb]
       exact hf'_mono (hxyD' ⟨hxa, hay⟩) (hyzD' ⟨hyb, hbz⟩) (hay.trans hyb).le)
 #align monotone_on.convex_on_of_deriv MonotoneOn.convexOn_of_deriv
@@ -1086,8 +1087,8 @@ theorem StrictMonoOn.exists_slope_lt_deriv_aux {x y : ℝ} {f : ℝ → ℝ} (hf
     ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a := by
   have A : DifferentiableOn ℝ f (Ioo x y) := fun w wmem =>
     (differentiableAt_of_deriv_ne_zero (h w wmem)).differentiableWithinAt
-  obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x)
-  exact exists_deriv_eq_slope f hxy hf A
+  obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x) :=
+    exists_deriv_eq_slope f hxy hf A
   rcases nonempty_Ioo.2 hay with ⟨b, ⟨hab, hby⟩⟩
   refine' ⟨b, ⟨hxa.trans hab, hby⟩, _⟩
   rw [← ha]
@@ -1132,8 +1133,8 @@ theorem StrictMonoOn.exists_deriv_lt_slope_aux {x y : ℝ} {f : ℝ → ℝ} (hf
     ∃ a ∈ Ioo x y, deriv f a < (f y - f x) / (y - x) := by
   have A : DifferentiableOn ℝ f (Ioo x y) := fun w wmem =>
     (differentiableAt_of_deriv_ne_zero (h w wmem)).differentiableWithinAt
-  obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x)
-  exact exists_deriv_eq_slope f hxy hf A
+  obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, deriv f a = (f y - f x) / (y - x) :=
+    exists_deriv_eq_slope f hxy hf A
   rcases nonempty_Ioo.2 hxa with ⟨b, ⟨hxb, hba⟩⟩
   refine' ⟨b, ⟨hxb, hba.trans hay⟩, _⟩
   rw [← ha]
@@ -1190,10 +1191,10 @@ theorem StrictMonoOn.strictConvexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {
       subset_sUnion_of_mem ⟨isOpen_Ioo, Ioo_subset_Icc_self.trans hyzD⟩
     -- Then we get points `a` and `b` in each interval `[x, y]` and `[y, z]` where the derivatives
     -- can be compared to the slopes between `x, y` and `y, z` respectively.
-    obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a
-    · exact StrictMonoOn.exists_slope_lt_deriv (hf.mono hxyD) hxy (hf'.mono hxyD')
-    obtain ⟨b, ⟨hyb, hbz⟩, hb⟩ : ∃ b ∈ Ioo y z, deriv f b < (f z - f y) / (z - y)
-    · exact StrictMonoOn.exists_deriv_lt_slope (hf.mono hyzD) hyz (hf'.mono hyzD')
+    obtain ⟨a, ⟨hxa, hay⟩, ha⟩ : ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a :=
+      StrictMonoOn.exists_slope_lt_deriv (hf.mono hxyD) hxy (hf'.mono hxyD')
+    obtain ⟨b, ⟨hyb, hbz⟩, hb⟩ : ∃ b ∈ Ioo y z, deriv f b < (f z - f y) / (z - y) :=
+      StrictMonoOn.exists_deriv_lt_slope (hf.mono hyzD) hyz (hf'.mono hyzD')
     apply ha.trans (lt_trans _ hb)
     exact hf' (hxyD' ⟨hxa, hay⟩) (hyzD' ⟨hyb, hbz⟩) (hay.trans hyb)
 #align strict_mono_on.strict_convex_on_of_deriv StrictMonoOn.strictConvexOn_of_deriv
chore: prepare Lean version bump with explicit simp (#10999)

Co-authored-by: Scott Morrison <scott.morrison@gmail.com>

Diff
@@ -102,7 +102,7 @@ theorem image_le_of_liminf_slope_right_lt_deriv_boundary' {f f' : ℝ → ℝ} {
   set s := { x | f x ≤ B x } ∩ Icc a b
   have A : ContinuousOn (fun x => (f x, B x)) (Icc a b) := hf.prod hB
   have : IsClosed s := by
-    simp only [inter_comm]
+    simp only [s, inter_comm]
     exact A.preimage_isClosed_of_isClosed isClosed_Icc OrderClosedTopology.isClosed_le'
   apply this.Icc_subset_of_forall_exists_gt ha
   rintro x ⟨hxB : f x ≤ B x, xab⟩ y hy
@@ -349,7 +349,7 @@ theorem norm_image_sub_le_of_norm_deriv_right_le_segment {f' : ℝ → E} {C : 
     intro x
     simpa using (hasDerivAt_const x C).mul ((hasDerivAt_id x).sub (hasDerivAt_const x a))
   convert image_norm_le_of_norm_deriv_right_le_deriv_boundary hg hg' _ hB bound
-  simp only; rw [sub_self, norm_zero, sub_self, mul_zero]
+  simp only [g, B]; rw [sub_self, norm_zero, sub_self, mul_zero]
 #align norm_image_sub_le_of_norm_deriv_right_le_segment norm_image_sub_le_of_norm_deriv_right_le_segment
 
 /-- A function on `[a, b]` with the norm of the derivative within `[a, b]`
@@ -470,7 +470,7 @@ theorem norm_image_sub_le_of_norm_hasFDerivWithin_le
       AffineMap.hasDerivWithinAt_lineMap segm
   have bound : ∀ t ∈ Ico (0 : ℝ) 1, ‖f' (g t) (y - x)‖ ≤ C * ‖y - x‖ := fun t ht =>
     le_of_opNorm_le _ (bound _ <| segm <| Ico_subset_Icc_self ht) _
-  simpa using norm_image_sub_le_of_norm_deriv_le_segment_01' hD bound
+  simpa [g] using norm_image_sub_le_of_norm_deriv_le_segment_01' hD bound
 #align convex.norm_image_sub_le_of_norm_has_fderiv_within_le Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le
 
 /-- The mean value theorem on a convex set: if the derivative of a function is bounded by `C` on
@@ -729,7 +729,7 @@ variable (f f' : ℝ → ℝ) {a b : ℝ} (hab : a < b) (hfc : ContinuousOn f (I
 theorem exists_ratio_hasDerivAt_eq_ratio_slope :
     ∃ c ∈ Ioo a b, (g b - g a) * f' c = (f b - f a) * g' c := by
   let h x := (g b - g a) * f x - (f b - f a) * g x
-  have hI : h a = h b := by simp only; ring
+  have hI : h a = h b := by simp only [h]; ring
   let h' x := (g b - g a) * f' x - (f b - f a) * g' x
   have hhh' : ∀ x ∈ Ioo a b, HasDerivAt h (h' x) x := fun x hx =>
     ((hff' x hx).const_mul (g b - g a)).sub ((hgg' x hx).const_mul (f b - f a))
@@ -1415,7 +1415,7 @@ theorem domain_mvt {f : E → ℝ} {s : Set E} {x y : E} {f' : E → E →L[ℝ]
   rcases hMVT with ⟨t, Ht, hMVT'⟩
   rw [segment_eq_image_lineMap, bex_image_iff]
   refine ⟨t, hsub Ht, ?_⟩
-  simpa using hMVT'.symm
+  simpa [g] using hMVT'.symm
 #align domain_mvt domain_mvt
 
 section IsROrC
feat: Variants of monotonicity from existence of a derivative (#10229)

This PR provides variants of the deriv lemmas, stated in terms of HasDerivAt.

From LeanAPAP
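
As a rough usage sketch of one of the new variants (illustrative only; `Real.exp` is unrelated to this file and `import Mathlib` is assumed), the `HasDerivAt` form avoids mentioning `deriv` altogether:

  import Mathlib

  -- `exp` has derivative `exp x` at every point and `exp x` is positive,
  -- so the new lemma yields strict monotonicity directly.
  example : StrictMono Real.exp :=
    strictMono_of_hasDerivAt_pos (f' := Real.exp) Real.hasDerivAt_exp Real.exp_pos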

Diff
@@ -57,8 +57,8 @@ In this file we prove the following facts:
   `Convex.image_sub_le_mul_sub_of_deriv_le`, `Convex.mul_sub_le_image_sub_of_le_deriv`,
   if `∀ x, C (</≤/>/≥) (f' x)`, then `C * (y - x) (</≤/>/≥) (f y - f x)` whenever `x < y`.
 
-* `Convex.monotoneOn_of_deriv_nonneg`, `Convex.antitoneOn_of_deriv_nonpos`,
-  `Convex.strictMono_of_deriv_pos`, `Convex.strictAnti_of_deriv_neg` :
+* `monotoneOn_of_deriv_nonneg`, `antitoneOn_of_deriv_nonpos`,
+  `strictMono_of_deriv_pos`, `strictAnti_of_deriv_neg` :
   if the derivative of a function is non-negative/non-positive/positive/negative, then
   the function is monotone/antitone/strictly monotone/strictly monotonically
   decreasing.
@@ -904,87 +904,148 @@ of the real line. If `f` is differentiable on the interior of `D` and `f'` is po
 `f` is a strictly monotone function on `D`.
 Note that we don't require differentiability explicitly as it already implied by the derivative
 being strictly positive. -/
-theorem Convex.strictMonoOn_of_deriv_pos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
+theorem strictMonoOn_of_deriv_pos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, 0 < deriv f x) : StrictMonoOn f D := by
   intro x hx y hy
   have : DifferentiableOn ℝ f (interior D) := fun z hz =>
     (differentiableAt_of_deriv_ne_zero (hf' z hz).ne').differentiableWithinAt
   simpa only [zero_mul, sub_pos] using
     hD.mul_sub_lt_image_sub_of_lt_deriv hf this hf' x hx y hy
-#align convex.strict_mono_on_of_deriv_pos Convex.strictMonoOn_of_deriv_pos
+#align convex.strict_mono_on_of_deriv_pos strictMonoOn_of_deriv_pos
 
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is positive, then
 `f` is a strictly monotone function.
 Note that we don't require differentiability explicitly as it already implied by the derivative
 being strictly positive. -/
 theorem strictMono_of_deriv_pos {f : ℝ → ℝ} (hf' : ∀ x, 0 < deriv f x) : StrictMono f :=
-  strictMonoOn_univ.1 <| convex_univ.strictMonoOn_of_deriv_pos (fun z _ =>
+  strictMonoOn_univ.1 <| strictMonoOn_of_deriv_pos convex_univ (fun z _ =>
     (differentiableAt_of_deriv_ne_zero (hf' z).ne').differentiableWithinAt.continuousWithinAt)
     fun x _ => hf' x
 #align strict_mono_of_deriv_pos strictMono_of_deriv_pos
 
+/-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
+of the real line. If `f` is differentiable on the interior of `D` and `f'` is strictly positive,
+then `f` is a strictly monotone function on `D`. -/
+lemma StrictMonoOn_of_hasDerivWithinAt_pos {D : Set ℝ} (hD : Convex ℝ D) {f f' : ℝ → ℝ}
+    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
+    (hf'₀ : ∀ x ∈ interior D, 0 < f' x) : StrictMonoOn f D :=
+  strictMonoOn_of_deriv_pos hD hf fun x hx ↦ by
+    rw [deriv_eqOn isOpen_interior hf' hx]; exact hf'₀ _ hx
+
+/-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is strictly positive, then
+`f` is a strictly monotone function. -/
+lemma strictMono_of_hasDerivAt_pos {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x)
+    (hf' : ∀ x, 0 < f' x) : StrictMono f :=
+  strictMono_of_deriv_pos fun x ↦ by rw [(hf _).deriv]; exact hf' _
+
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonnegative, then
 `f` is a monotone function on `D`. -/
-theorem Convex.monotoneOn_of_deriv_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
+theorem monotoneOn_of_deriv_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : DifferentiableOn ℝ f (interior D))
     (hf'_nonneg : ∀ x ∈ interior D, 0 ≤ deriv f x) : MonotoneOn f D := fun x hx y hy hxy => by
   simpa only [zero_mul, sub_nonneg] using
     hD.mul_sub_le_image_sub_of_le_deriv hf hf' hf'_nonneg x hx y hy hxy
-#align convex.monotone_on_of_deriv_nonneg Convex.monotoneOn_of_deriv_nonneg
+#align convex.monotone_on_of_deriv_nonneg monotoneOn_of_deriv_nonneg
 
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is nonnegative, then
 `f` is a monotone function. -/
 theorem monotone_of_deriv_nonneg {f : ℝ → ℝ} (hf : Differentiable ℝ f) (hf' : ∀ x, 0 ≤ deriv f x) :
     Monotone f :=
   monotoneOn_univ.1 <|
-    convex_univ.monotoneOn_of_deriv_nonneg hf.continuous.continuousOn hf.differentiableOn fun x _ =>
+    monotoneOn_of_deriv_nonneg convex_univ hf.continuous.continuousOn hf.differentiableOn fun x _ =>
       hf' x
 #align monotone_of_deriv_nonneg monotone_of_deriv_nonneg
 
+/-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
+of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonnegative, then
+`f` is a monotone function on `D`. -/
+lemma monotoneOn_of_hasDerivWithinAt_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f f' : ℝ → ℝ}
+    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
+    (hf'₀ : ∀ x ∈ interior D, 0 ≤ f' x) : MonotoneOn f D :=
+  monotoneOn_of_deriv_nonneg hD hf (fun x hx ↦ (hf' _ hx).differentiableWithinAt) fun x hx ↦ by
+    rw [deriv_eqOn isOpen_interior hf' hx]; exact hf'₀ _ hx
+
+/-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is nonnegative, then
+`f` is a monotone function. -/
+lemma monotone_of_hasDerivAt_nonneg {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x)
+    (hf' : 0 ≤ f') : Monotone f :=
+  monotone_of_deriv_nonneg (fun x ↦ (hf _).differentiableAt) fun x ↦ by
+    rw [(hf _).deriv]; exact hf' _
+
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is negative, then
 `f` is a strictly antitone function on `D`. -/
-theorem Convex.strictAntiOn_of_deriv_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
+theorem strictAntiOn_of_deriv_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, deriv f x < 0) : StrictAntiOn f D :=
   fun x hx y => by
   simpa only [zero_mul, sub_lt_zero] using
     hD.image_sub_lt_mul_sub_of_deriv_lt hf
       (fun z hz => (differentiableAt_of_deriv_ne_zero (hf' z hz).ne).differentiableWithinAt) hf' x
       hx y
-#align convex.strict_anti_on_of_deriv_neg Convex.strictAntiOn_of_deriv_neg
+#align convex.strict_anti_on_of_deriv_neg strictAntiOn_of_deriv_neg
 
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is negative, then
 `f` is a strictly antitone function.
 Note that we don't require differentiability explicitly as it already implied by the derivative
 being strictly negative. -/
 theorem strictAnti_of_deriv_neg {f : ℝ → ℝ} (hf' : ∀ x, deriv f x < 0) : StrictAnti f :=
-  strictAntiOn_univ.1 <|
-    convex_univ.strictAntiOn_of_deriv_neg
+  strictAntiOn_univ.1 <| strictAntiOn_of_deriv_neg convex_univ
       (fun z _ =>
         (differentiableAt_of_deriv_ne_zero (hf' z).ne).differentiableWithinAt.continuousWithinAt)
       fun x _ => hf' x
 #align strict_anti_of_deriv_neg strictAnti_of_deriv_neg
 
+/-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
+of the real line. If `f` is differentiable on the interior of `D` and `f'` is strictly positive,
+then `f` is a strictly monotone function on `D`. -/
+lemma StrictAntiOn_of_hasDerivWithinAt_pos {D : Set ℝ} (hD : Convex ℝ D) {f f' : ℝ → ℝ}
+    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
+    (hf'₀ : ∀ x ∈ interior D, f' x < 0) : StrictAntiOn f D :=
+  strictAntiOn_of_deriv_neg hD hf fun x hx ↦ by
+    rw [deriv_eqOn isOpen_interior hf' hx]; exact hf'₀ _ hx
+
+/-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is strictly positive, then
+`f` is a strictly monotone function. -/
+lemma strictAnti_of_hasDerivAt_pos {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x)
+    (hf' : ∀ x, f' x < 0) : StrictAnti f :=
+  strictAnti_of_deriv_neg fun x ↦ by rw [(hf _).deriv]; exact hf' _
+
 /-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
 of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonpositive, then
 `f` is an antitone function on `D`. -/
-theorem Convex.antitoneOn_of_deriv_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
+theorem antitoneOn_of_deriv_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : DifferentiableOn ℝ f (interior D))
     (hf'_nonpos : ∀ x ∈ interior D, deriv f x ≤ 0) : AntitoneOn f D := fun x hx y hy hxy => by
   simpa only [zero_mul, sub_nonpos] using
     hD.image_sub_le_mul_sub_of_deriv_le hf hf' hf'_nonpos x hx y hy hxy
-#align convex.antitone_on_of_deriv_nonpos Convex.antitoneOn_of_deriv_nonpos
+#align convex.antitone_on_of_deriv_nonpos antitoneOn_of_deriv_nonpos
 
 /-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is nonpositive, then
 `f` is an antitone function. -/
 theorem antitone_of_deriv_nonpos {f : ℝ → ℝ} (hf : Differentiable ℝ f) (hf' : ∀ x, deriv f x ≤ 0) :
     Antitone f :=
   antitoneOn_univ.1 <|
-    convex_univ.antitoneOn_of_deriv_nonpos hf.continuous.continuousOn hf.differentiableOn fun x _ =>
+    antitoneOn_of_deriv_nonpos convex_univ hf.continuous.continuousOn hf.differentiableOn fun x _ =>
       hf' x
 #align antitone_of_deriv_nonpos antitone_of_deriv_nonpos
 
+/-- Let `f` be a function continuous on a convex (or, equivalently, connected) subset `D`
+of the real line. If `f` is differentiable on the interior of `D` and `f'` is nonpositive, then
+`f` is an antitone function on `D`. -/
+lemma antitoneOn_of_hasDerivWithinAt_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f f' : ℝ → ℝ}
+    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
+    (hf'₀ : ∀ x ∈ interior D, f' x ≤ 0) : AntitoneOn f D :=
+  antitoneOn_of_deriv_nonpos hD hf (fun x hx ↦ (hf' _ hx).differentiableWithinAt) fun x hx ↦ by
+    rw [deriv_eqOn isOpen_interior hf' hx]; exact hf'₀ _ hx
+
+/-- Let `f : ℝ → ℝ` be a differentiable function. If `f'` is nonpositive, then `f` is an antitone
+function. -/
+lemma antitone_of_hasDerivAt_nonpos {f f' : ℝ → ℝ} (hf : ∀ x, HasDerivAt f (f' x) x)
+    (hf' : f' ≤ 0) : Antitone f :=
+  antitone_of_deriv_nonpos (fun x ↦ (hf _).differentiableAt) fun x ↦ by
+    rw [(hf _).deriv]; exact hf' _
+
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is differentiable on its interior,
 and `f'` is monotone on the interior, then `f` is convex on `D`. -/
 theorem MonotoneOn.convexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
@@ -1183,7 +1244,7 @@ interior, and `f''` is nonnegative on the interior, then `f` is convex on `D`. -
 theorem convexOn_of_deriv2_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
     (hf' : DifferentiableOn ℝ f (interior D)) (hf'' : DifferentiableOn ℝ (deriv f) (interior D))
     (hf''_nonneg : ∀ x ∈ interior D, 0 ≤ deriv^[2] f x) : ConvexOn ℝ D f :=
-  (hD.interior.monotoneOn_of_deriv_nonneg hf''.continuousOn (by rwa [interior_interior]) <| by
+  (monotoneOn_of_deriv_nonneg hD.interior hf''.continuousOn (by rwa [interior_interior]) <| by
         rwa [interior_interior]).convexOn_of_deriv
     hD hf hf'
 #align convex_on_of_deriv2_nonneg convexOn_of_deriv2_nonneg
@@ -1193,11 +1254,43 @@ interior, and `f''` is nonpositive on the interior, then `f` is concave on `D`.
 theorem concaveOn_of_deriv2_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
     (hf' : DifferentiableOn ℝ f (interior D)) (hf'' : DifferentiableOn ℝ (deriv f) (interior D))
     (hf''_nonpos : ∀ x ∈ interior D, deriv^[2] f x ≤ 0) : ConcaveOn ℝ D f :=
-  (hD.interior.antitoneOn_of_deriv_nonpos hf''.continuousOn (by rwa [interior_interior]) <| by
+  (antitoneOn_of_deriv_nonpos hD.interior hf''.continuousOn (by rwa [interior_interior]) <| by
         rwa [interior_interior]).concaveOn_of_deriv
     hD hf hf'
 #align concave_on_of_deriv2_nonpos concaveOn_of_deriv2_nonpos
 
+/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is twice differentiable on its
+interior, and `f''` is nonnegative on the interior, then `f` is convex on `D`. -/
+lemma convexOn_of_hasDerivWithinAt2_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f f' f'' : ℝ → ℝ}
+    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
+    (hf'' : ∀ x ∈ interior D, HasDerivWithinAt f' (f'' x) (interior D) x)
+    (hf''₀ : ∀ x ∈ interior D, 0 ≤ f'' x) : ConvexOn ℝ D f := by
+  have : (interior D).EqOn (deriv f) f' := deriv_eqOn isOpen_interior hf'
+  refine convexOn_of_deriv2_nonneg hD hf (fun x hx ↦ (hf' _ hx).differentiableWithinAt) ?_ ?_
+  · rw [differentiableOn_congr this]
+    exact fun x hx ↦ (hf'' _ hx).differentiableWithinAt
+  · rintro x hx
+    convert hf''₀ _ hx using 1
+    dsimp
+    rw [deriv_eqOn isOpen_interior (fun y hy ↦ ?_) hx]
+    exact (hf'' _ hy).congr this $ by rw [this hy]
+
+/-- If a function `f` is continuous on a convex set `D ⊆ ℝ`, is twice differentiable on its
+interior, and `f''` is nonpositive on the interior, then `f` is concave on `D`. -/
+lemma concaveOn_of_hasDerivWithinAt2_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f f' f'' : ℝ → ℝ}
+    (hf : ContinuousOn f D) (hf' : ∀ x ∈ interior D, HasDerivWithinAt f (f' x) (interior D) x)
+    (hf'' : ∀ x ∈ interior D, HasDerivWithinAt f' (f'' x) (interior D) x)
+    (hf''₀ : ∀ x ∈ interior D, f'' x ≤ 0) : ConcaveOn ℝ D f := by
+  have : (interior D).EqOn (deriv f) f' := deriv_eqOn isOpen_interior hf'
+  refine concaveOn_of_deriv2_nonpos hD hf (fun x hx ↦ (hf' _ hx).differentiableWithinAt) ?_ ?_
+  · rw [differentiableOn_congr this]
+    exact fun x hx ↦ (hf'' _ hx).differentiableWithinAt
+  · rintro x hx
+    convert hf''₀ _ hx using 1
+    dsimp
+    rw [deriv_eqOn isOpen_interior (fun y hy ↦ ?_) hx]
+    exact (hf'' _ hy).congr this $ by rw [this hy]
+
 /-- If a function `f` is continuous on a convex set `D ⊆ ℝ` and `f''` is strictly positive on the
 interior, then `f` is strictly convex on `D`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
@@ -1205,7 +1298,7 @@ derivative being strictly positive, except at at most one point. -/
 theorem strictConvexOn_of_deriv2_pos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf'' : ∀ x ∈ interior D, 0 < (deriv^[2] f) x) :
     StrictConvexOn ℝ D f :=
-  ((hD.interior.strictMonoOn_of_deriv_pos fun z hz =>
+  ((strictMonoOn_of_deriv_pos hD.interior fun z hz =>
           (differentiableAt_of_deriv_ne_zero
                 (hf'' z hz).ne').differentiableWithinAt.continuousWithinAt) <|
         by rwa [interior_interior]).strictConvexOn_of_deriv
@@ -1219,7 +1312,7 @@ derivative being strictly negative, except at at most one point. -/
 theorem strictConcaveOn_of_deriv2_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf'' : ∀ x ∈ interior D, deriv^[2] f x < 0) :
     StrictConcaveOn ℝ D f :=
-  ((hD.interior.strictAntiOn_of_deriv_neg fun z hz =>
+  ((strictAntiOn_of_deriv_neg hD.interior fun z hz =>
           (differentiableAt_of_deriv_ne_zero
                 (hf'' z hz).ne).differentiableWithinAt.continuousWithinAt) <|
         by rwa [interior_interior]).strictConcaveOn_of_deriv
chore: rename op_norm to opNorm (#10185)

Co-authored-by: adomani <adomani@gmail.com>

Diff
@@ -469,7 +469,7 @@ theorem norm_image_sub_le_of_norm_hasFDerivWithin_le
     simpa using ((hf (g t) (segm ht)).restrictScalars ℝ).comp_hasDerivWithinAt _
       AffineMap.hasDerivWithinAt_lineMap segm
   have bound : ∀ t ∈ Ico (0 : ℝ) 1, ‖f' (g t) (y - x)‖ ≤ C * ‖y - x‖ := fun t ht =>
-    le_of_op_norm_le _ (bound _ <| segm <| Ico_subset_Icc_self ht) _
+    le_of_opNorm_le _ (bound _ <| segm <| Ico_subset_Icc_self ht) _
   simpa using norm_image_sub_le_of_norm_deriv_le_segment_01' hD bound
 #align convex.norm_image_sub_le_of_norm_has_fderiv_within_le Convex.norm_image_sub_le_of_norm_hasFDerivWithin_le
 
chore(*): drop $/<| before fun (#9361)

Subset of #9319
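
A tiny illustrative sketch of the style change (not taken from this file): a trailing `fun` needs no `<|` or `$` in front of it, since function application already extends over the lambda.

  -- old style:  [1, 2, 3].map <| fun n => n + 1
  -- new style:
  #eval [1, 2, 3].map fun n => n + 1   -- [2, 3, 4]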

Diff
@@ -1118,7 +1118,7 @@ Note that we don't require differentiability, since it is guaranteed at all but
 one point by the strict monotonicity of `f'`. -/
 theorem StrictMonoOn.strictConvexOn_of_deriv {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf : ContinuousOn f D) (hf' : StrictMonoOn (deriv f) (interior D)) : StrictConvexOn ℝ D f :=
-  strictConvexOn_of_slope_strict_mono_adjacent hD <| fun {x y z} hx hz hxy hyz => by
+  strictConvexOn_of_slope_strict_mono_adjacent hD fun {x y z} hx hz hxy hyz => by
     -- First we prove some trivial inclusions
     have hxzD : Icc x z ⊆ D := hD.ordConnected.out hx hz
     have hxyD : Icc x y ⊆ D := (Icc_subset_Icc_right hyz.le).trans hxzD
chore: rename most lemmas involving clopen to isClopen (#8720)

This PR renames the field Clopens.clopen' -> Clopens.isClopen', and the lemmas

  • preimage_closed_of_closed -> ContinuousOn.preimage_isClosed_of_isClosed

as well as:

  • ClopenUpperSet.clopen -> ClopenUpperSet.isClopen
  • connectedComponent_eq_iInter_clopen -> connectedComponent_eq_iInter_isClopen
  • connectedComponent_subset_iInter_clopen -> connectedComponent_subset_iInter_isClopen
  • continuous_boolIndicator_iff_clopen -> continuous_boolIndicator_iff_isClopen
  • continuousOn_boolIndicator_iff_clopen -> continuousOn_boolIndicator_iff_isClopen
  • DiscreteQuotient.ofClopen -> DiscreteQuotient.ofIsClopen
  • disjoint_or_subset_of_clopen -> disjoint_or_subset_of_isClopen
  • exists_clopen_{lower,upper}_of_not_le -> exists_isClopen_{lower,upper}_of_not_le
  • exists_clopen_of_cofiltered -> exists_isClopen_of_cofiltered
  • exists_clopen_of_totally_separated -> exists_isClopen_of_totally_separated
  • exists_clopen_upper_or_lower_of_ne -> exists_isClopen_upper_or_lower_of_ne
  • IsPreconnected.subset_clopen -> IsPreconnected.subset_isClopen
  • isTotallyDisconnected_of_clopen_set -> isTotallyDisconnected_of_isClopen_set
  • LocallyConstant.ofClopen_fiber_one -> LocallyConstant.ofIsClopen_fiber_one
  • LocallyConstant.ofClopen_fiber_zero -> LocallyConstant.ofIsClopen_fiber_zero
  • LocallyConstant.ofClopen -> LocallyConstant.ofIsClopen
  • preimage_clopen_of_clopen -> preimage_isClopen_of_isClopen
  • TopologicalSpace.Clopens.clopen -> TopologicalSpace.Clopens.isClopen

Diff
@@ -103,7 +103,7 @@ theorem image_le_of_liminf_slope_right_lt_deriv_boundary' {f f' : ℝ → ℝ} {
   have A : ContinuousOn (fun x => (f x, B x)) (Icc a b) := hf.prod hB
   have : IsClosed s := by
     simp only [inter_comm]
-    exact A.preimage_closed_of_closed isClosed_Icc OrderClosedTopology.isClosed_le'
+    exact A.preimage_isClosed_of_isClosed isClosed_Icc OrderClosedTopology.isClosed_le'
   apply this.Icc_subset_of_forall_exists_gt ha
   rintro x ⟨hxB : f x ≤ B x, xab⟩ y hy
   cases' hxB.lt_or_eq with hxB hxB
feat: cleanup API around differentiable functions (#7004)

First calculus prerequisites for the Rademacher theorem in #7003.

Add a few lemmas that were available for FDeriv but not for Deriv, and weaken assumptions here and there.
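
As a usage sketch of the new `fderiv`/`LipschitzWith` lemma added below (the hypotheses and the specialisation to `ℝ → ℝ` are illustrative; `import Mathlib` is assumed):

  import Mathlib

  open scoped NNReal

  -- A global bound on the Fréchet derivative gives a global Lipschitz constant.
  example {f : ℝ → ℝ} {C : ℝ≥0} (hf : Differentiable ℝ f)
      (hC : ∀ x, ‖fderiv ℝ f x‖₊ ≤ C) : LipschitzWith C f :=
    lipschitzWith_of_nnnorm_fderiv_le hf hC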

Diff
@@ -550,6 +550,16 @@ theorem lipschitzOnWith_of_nnnorm_fderiv_le {C : ℝ≥0} (hf : ∀ x ∈ s, Dif
     (fun x hx => (hf x hx).hasFDerivAt.hasFDerivWithinAt) bound
 #align convex.lipschitz_on_with_of_nnnorm_fderiv_le Convex.lipschitzOnWith_of_nnnorm_fderiv_le
 
+/-- The mean value theorem: if the derivative of a function is bounded by `C`, then the function is
+`C`-Lipschitz. Version with `fderiv` and `LipschitzWith`. -/
+theorem _root_.lipschitzWith_of_nnnorm_fderiv_le
+    {E : Type*} [NormedAddCommGroup E] [NormedSpace 𝕜 E] {f : E → G}
+    {C : ℝ≥0} (hf : Differentiable 𝕜 f)
+    (bound : ∀ x, ‖fderiv 𝕜 f x‖₊ ≤ C) : LipschitzWith C f := by
+  let A : NormedSpace ℝ E := RestrictScalars.normedSpace ℝ 𝕜 E
+  rw [← lipschitzOn_univ]
+  exact lipschitzOnWith_of_nnnorm_fderiv_le (fun x _ ↦ hf x) (fun x _ ↦ bound x) convex_univ
+
 /-- Variant of the mean value inequality on a convex set, using a bound on the difference between
 the derivative and a fixed linear map, rather than a bound on the derivative itself. Version with
 `HasFDerivWithinAt`. -/
@@ -595,9 +605,12 @@ theorem is_const_of_fderivWithin_eq_zero (hs : Convex ℝ s) (hf : Differentiabl
     hs.norm_image_sub_le_of_norm_fderivWithin_le hf bound hx hy
 #align convex.is_const_of_fderiv_within_eq_zero Convex.is_const_of_fderivWithin_eq_zero
 
-theorem _root_.is_const_of_fderiv_eq_zero (hf : Differentiable 𝕜 f) (hf' : ∀ x, fderiv 𝕜 f x = 0)
-    (x y : E) : f x = f y :=
-  convex_univ.is_const_of_fderivWithin_eq_zero hf.differentiableOn
+theorem _root_.is_const_of_fderiv_eq_zero
+    {E : Type*} [NormedAddCommGroup E] [NormedSpace 𝕜 E] {f : E → G}
+    (hf : Differentiable 𝕜 f) (hf' : ∀ x, fderiv 𝕜 f x = 0)
+    (x y : E) : f x = f y := by
+  let A : NormedSpace ℝ E := RestrictScalars.normedSpace ℝ 𝕜 E
+  exact convex_univ.is_const_of_fderivWithin_eq_zero hf.differentiableOn
     (fun x _ => by rw [fderivWithin_univ]; exact hf' x) trivial trivial
 #align is_const_of_fderiv_eq_zero is_const_of_fderiv_eq_zero
 
@@ -612,11 +625,14 @@ theorem eqOn_of_fderivWithin_eq (hs : Convex ℝ s) (hf : DifferentiableOn 𝕜
   rw [fderivWithin_sub (hs' _ hz) (hf _ hz) (hg _ hz), sub_eq_zero, hf' _ hz]
 #align convex.eq_on_of_fderiv_within_eq Convex.eqOn_of_fderivWithin_eq
 
-theorem _root_.eq_of_fderiv_eq (hf : Differentiable 𝕜 f) (hg : Differentiable 𝕜 g)
-    (hf' : ∀ x, fderiv 𝕜 f x = fderiv 𝕜 g x) (x : E) (hfgx : f x = g x) : f = g :=
+theorem _root_.eq_of_fderiv_eq
+    {E : Type*} [NormedAddCommGroup E] [NormedSpace 𝕜 E] {f g : E → G}
+    (hf : Differentiable 𝕜 f) (hg : Differentiable 𝕜 g)
+    (hf' : ∀ x, fderiv 𝕜 f x = fderiv 𝕜 g x) (x : E) (hfgx : f x = g x) : f = g := by
+  let A : NormedSpace ℝ E := RestrictScalars.normedSpace ℝ 𝕜 E
   suffices Set.univ.EqOn f g from funext fun x => this <| mem_univ x
-  convex_univ.eqOn_of_fderivWithin_eq hf.differentiableOn hg.differentiableOn uniqueDiffOn_univ
-    (fun x _ => by simpa using hf' _) (mem_univ _) hfgx
+  exact convex_univ.eqOn_of_fderivWithin_eq hf.differentiableOn hg.differentiableOn
+    uniqueDiffOn_univ (fun x _ => by simpa using hf' _) (mem_univ _) hfgx
 #align eq_of_fderiv_eq eq_of_fderiv_eq
 
 end Convex
chore(Analysis): rename lipschitz_on_univ to lipschitzOn_univ (#6946)

Also rename dimH_image_le_of_locally_lipschitz_on to dimH_image_le_of_locally_lipschitzOn.

Diff
@@ -684,7 +684,7 @@ theorem lipschitzOnWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : ∀ x ∈ s, Diff
 then the function is `C`-Lipschitz. Version with `deriv` and `LipschitzWith`. -/
 theorem _root_.lipschitzWith_of_nnnorm_deriv_le {C : ℝ≥0} (hf : Differentiable 𝕜 f)
     (bound : ∀ x, ‖deriv f x‖₊ ≤ C) : LipschitzWith C f :=
-  lipschitz_on_univ.1 <|
+  lipschitzOn_univ.1 <|
     convex_univ.lipschitzOnWith_of_nnnorm_deriv_le (fun x _ => hf x) fun x _ => bound x
 #align lipschitz_with_of_nnnorm_deriv_le lipschitzWith_of_nnnorm_deriv_le
 
chore: remove duplicate lemma HasFDerivWithinAt.nhdsWithin (#6773)

This is a perfect duplicate of HasFDerivWithinAt.mono_of_mem.

Same thing with Deriv instead of FDeriv.

Diff
@@ -361,7 +361,7 @@ theorem norm_image_sub_le_of_norm_deriv_le_segment' {f' : ℝ → E} {C : ℝ}
   refine'
     norm_image_sub_le_of_norm_deriv_right_le_segment (fun x hx => (hf x hx).continuousWithinAt)
       (fun x hx => _) bound
-  exact (hf x <| Ico_subset_Icc_self hx).nhdsWithin (Icc_mem_nhdsWithin_Ici hx)
+  exact (hf x <| Ico_subset_Icc_self hx).mono_of_mem (Icc_mem_nhdsWithin_Ici hx)
 #align norm_image_sub_le_of_norm_deriv_le_segment' norm_image_sub_le_of_norm_deriv_le_segment'
 
 /-- A function on `[a, b]` with the norm of the derivative within `[a, b]`
@@ -427,9 +427,9 @@ theorem eq_of_derivWithin_eq (fdiff : DifferentiableOn ℝ f (Icc a b))
     (hderiv : EqOn (derivWithin f (Icc a b)) (derivWithin g (Icc a b)) (Ico a b)) (hi : f a = g a) :
     ∀ y ∈ Icc a b, f y = g y := by
   have A : ∀ y ∈ Ico a b, HasDerivWithinAt f (derivWithin f (Icc a b) y) (Ici y) y := fun y hy =>
-    (fdiff y (mem_Icc_of_Ico hy)).hasDerivWithinAt.nhdsWithin (Icc_mem_nhdsWithin_Ici hy)
+    (fdiff y (mem_Icc_of_Ico hy)).hasDerivWithinAt.mono_of_mem (Icc_mem_nhdsWithin_Ici hy)
   have B : ∀ y ∈ Ico a b, HasDerivWithinAt g (derivWithin g (Icc a b) y) (Ici y) y := fun y hy =>
-    (gdiff y (mem_Icc_of_Ico hy)).hasDerivWithinAt.nhdsWithin (Icc_mem_nhdsWithin_Ici hy)
+    (gdiff y (mem_Icc_of_Ico hy)).hasDerivWithinAt.mono_of_mem (Icc_mem_nhdsWithin_Ici hy)
   exact
     eq_of_has_deriv_right_eq A (fun y hy => (hderiv hy).symm ▸ B y hy) fdiff.continuousOn
       gdiff.continuousOn hi
chore: banish Type _ and Sort _ (#6499)

We remove all possible occurrences of Type _ and Sort _ in favor of Type* and Sort*.

This has nice performance benefits.
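
For illustration (a sketch, not from this file): `Type*` introduces a fresh universe parameter explicitly, whereas `Type _` leaves a universe metavariable for the elaborator to solve.

  import Mathlib

  -- preferred: one fresh universe variable per binder
  variable {E : Type*} [NormedAddCommGroup E]
  -- old style, now avoided:
  -- variable {F : Type _} [NormedAddCommGroup F]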

Diff
@@ -71,7 +71,7 @@ In this file we prove the following facts:
 -/
 
 
-variable {E : Type _} [NormedAddCommGroup E] [NormedSpace ℝ E] {F : Type _} [NormedAddCommGroup F]
+variable {E : Type*} [NormedAddCommGroup E] [NormedSpace ℝ E] {F : Type*} [NormedAddCommGroup F]
   [NormedSpace ℝ F]
 
 open Metric Set Asymptotics ContinuousLinearMap Filter
@@ -247,7 +247,7 @@ Let `f` and `B` be continuous functions on `[a, b]` such that
 * we have `f' x < B' x` whenever `‖f x‖ = B x`.
 
 Then `‖f x‖ ≤ B x` everywhere on `[a, b]`. -/
-theorem image_norm_le_of_liminf_right_slope_norm_lt_deriv_boundary {E : Type _}
+theorem image_norm_le_of_liminf_right_slope_norm_lt_deriv_boundary {E : Type*}
     [NormedAddCommGroup E] {f : ℝ → E} {f' : ℝ → ℝ} (hf : ContinuousOn f (Icc a b))
     -- `hf'` actually says `liminf (‖f z‖ - ‖f x‖) / (z - x) ≤ f' x`
     (hf' : ∀ x ∈ Ico a b, ∀ r, f' x < r → ∃ᶠ z in 𝓝[>] x, slope (norm ∘ f) x z < r)
@@ -446,7 +446,7 @@ also assume `[NormedSpace ℝ E]` to have a notion of a `Convex` set. -/
 
 section
 
-variable {𝕜 G : Type _} [IsROrC 𝕜] [NormedSpace 𝕜 E] [NormedAddCommGroup G] [NormedSpace 𝕜 G]
+variable {𝕜 G : Type*} [IsROrC 𝕜] [NormedSpace 𝕜 E] [NormedAddCommGroup G] [NormedSpace 𝕜 G]
 
 namespace Convex
 
@@ -1321,7 +1321,7 @@ make sense and are enough. Many formulations of the mean value inequality could
 balls over `ℝ` or `ℂ`. For now, we only include the ones that we need.
 -/
 
-variable {𝕜 : Type _} [IsROrC 𝕜] {G : Type _} [NormedAddCommGroup G] [NormedSpace 𝕜 G] {H : Type _}
+variable {𝕜 : Type*} [IsROrC 𝕜] {G : Type*} [NormedAddCommGroup G] [NormedSpace 𝕜 G] {H : Type*}
   [NormedAddCommGroup H] [NormedSpace 𝕜 H] {f : G → H} {f' : G → G →L[𝕜] H} {x : G}
 
 /-- Over the reals or the complexes, a continuously differentiable function is strictly
refactor: split Analysis.Calculus.LocalExtr (#5944)

Also make f/f' arguments implicit in all versions of Rolle's Theorem.

Fixes #4830

API changes

  • exists_Ioo_extr_on_Icc:
    • generalize from functions f : ℝ → ℝ to functions from a conditionally complete linear order to a linear order.
    • make f implicit;
  • exists_local_extr_Ioo:
    • rename to exists_isLocalExtr_Ioo;
    • generalize as above;
    • make f implicit;
  • exists_isExtrOn_Ioo_of_tendsto, exists_isLocalExtr_Ioo_of_tendsto: new lemmas extracted from the proof of exists_hasDerivAt_eq_zero';
  • exists_hasDerivAt_eq_zero, exists_hasDerivAt_eq_zero': make f and f' implicit;
  • exists_deriv_eq_zero, exists_deriv_eq_zero': make f implicit.
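
As a sketch of the resulting calling convention (the example merely restates Rolle's theorem with illustrative hypothesis names; `import Mathlib` is assumed), `f` and `f'` are no longer passed explicitly:

  import Mathlib

  open Set

  example {f f' : ℝ → ℝ} {a b : ℝ} (hab : a < b)
      (hfc : ContinuousOn f (Icc a b)) (hfI : f a = f b)
      (hff' : ∀ x ∈ Ioo a b, HasDerivAt f (f' x) x) :
      ∃ c ∈ Ioo a b, f' c = 0 :=
    exists_hasDerivAt_eq_zero hab hfc hfI hff'
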
Diff
@@ -5,7 +5,9 @@ Authors: Sébastien Gouëzel, Yury Kudryashov
 -/
 import Mathlib.Analysis.Calculus.Deriv.AffineMap
 import Mathlib.Analysis.Calculus.Deriv.Slope
-import Mathlib.Analysis.Calculus.LocalExtr
+import Mathlib.Analysis.Calculus.Deriv.Mul
+import Mathlib.Analysis.Calculus.Deriv.Comp
+import Mathlib.Analysis.Calculus.LocalExtr.Rolle
 import Mathlib.Analysis.Convex.Slope
 import Mathlib.Analysis.Convex.Normed
 import Mathlib.Data.IsROrC.Basic
@@ -717,7 +719,7 @@ theorem exists_ratio_hasDerivAt_eq_ratio_slope :
     ((hff' x hx).const_mul (g b - g a)).sub ((hgg' x hx).const_mul (f b - f a))
   have hhc : ContinuousOn h (Icc a b) :=
     (continuousOn_const.mul hfc).sub (continuousOn_const.mul hgc)
-  rcases exists_hasDerivAt_eq_zero h h' hab hhc hI hhh' with ⟨c, cmem, hc⟩
+  rcases exists_hasDerivAt_eq_zero hab hhc hI hhh' with ⟨c, cmem, hc⟩
   exact ⟨c, cmem, sub_eq_zero.1 hc⟩
 #align exists_ratio_has_deriv_at_eq_ratio_slope exists_ratio_hasDerivAt_eq_ratio_slope
 
chore: script to replace headers with #align_import statements (#5979)


Co-authored-by: Eric Wieser <wieser.eric@gmail.com> Co-authored-by: Scott Morrison <scott.morrison@gmail.com>

Diff
@@ -2,11 +2,6 @@
 Copyright (c) 2019 Sébastien Gouëzel. All rights reserved.
 Released under Apache 2.0 license as described in the file LICENSE.
 Authors: Sébastien Gouëzel, Yury Kudryashov
-
-! This file was ported from Lean 3 source module analysis.calculus.mean_value
-! leanprover-community/mathlib commit 3bce8d800a6f2b8f63fe1e588fd76a9ff4adcebe
-! Please do not edit these lines, except to modify the commit id
-! if you have ported upstream changes.
 -/
 import Mathlib.Analysis.Calculus.Deriv.AffineMap
 import Mathlib.Analysis.Calculus.Deriv.Slope
@@ -16,6 +11,8 @@ import Mathlib.Analysis.Convex.Normed
 import Mathlib.Data.IsROrC.Basic
 import Mathlib.Topology.Instances.RealVectorSpace
 
+#align_import analysis.calculus.mean_value from "leanprover-community/mathlib"@"3bce8d800a6f2b8f63fe1e588fd76a9ff4adcebe"
+
 /-!
 # The mean value inequality and equalities
 
fix precedence of Nat.iterate (#5589)
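
The practical effect, sketched below (`import Mathlib` assumed), is that `^[n]` now binds tightly enough that iterated derivatives need no parentheses: `deriv^[2] f x` parses as `(deriv^[2]) f x`.

  import Mathlib

  -- the second derivative of `f`, evaluated at `x`, written without parentheses
  #check fun (f : ℝ → ℝ) (x : ℝ) => deriv^[2] f x
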
Diff
@@ -1167,7 +1167,7 @@ theorem StrictAnti.strictConcaveOn_univ_of_deriv {f : ℝ → ℝ} (hf : Continu
 interior, and `f''` is nonnegative on the interior, then `f` is convex on `D`. -/
 theorem convexOn_of_deriv2_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
     (hf' : DifferentiableOn ℝ f (interior D)) (hf'' : DifferentiableOn ℝ (deriv f) (interior D))
-    (hf''_nonneg : ∀ x ∈ interior D, 0 ≤ (deriv^[2]) f x) : ConvexOn ℝ D f :=
+    (hf''_nonneg : ∀ x ∈ interior D, 0 ≤ deriv^[2] f x) : ConvexOn ℝ D f :=
   (hD.interior.monotoneOn_of_deriv_nonneg hf''.continuousOn (by rwa [interior_interior]) <| by
         rwa [interior_interior]).convexOn_of_deriv
     hD hf hf'
@@ -1177,7 +1177,7 @@ theorem convexOn_of_deriv2_nonneg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ →
 interior, and `f''` is nonpositive on the interior, then `f` is concave on `D`. -/
 theorem concaveOn_of_deriv2_nonpos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ} (hf : ContinuousOn f D)
     (hf' : DifferentiableOn ℝ f (interior D)) (hf'' : DifferentiableOn ℝ (deriv f) (interior D))
-    (hf''_nonpos : ∀ x ∈ interior D, (deriv^[2]) f x ≤ 0) : ConcaveOn ℝ D f :=
+    (hf''_nonpos : ∀ x ∈ interior D, deriv^[2] f x ≤ 0) : ConcaveOn ℝ D f :=
   (hD.interior.antitoneOn_of_deriv_nonpos hf''.continuousOn (by rwa [interior_interior]) <| by
         rwa [interior_interior]).concaveOn_of_deriv
     hD hf hf'
@@ -1188,7 +1188,7 @@ interior, then `f` is strictly convex on `D`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
 derivative being strictly positive, except at at most one point. -/
 theorem strictConvexOn_of_deriv2_pos {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ interior D, 0 < ((deriv^[2]) f) x) :
+    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ interior D, 0 < (deriv^[2] f) x) :
     StrictConvexOn ℝ D f :=
   ((hD.interior.strictMonoOn_of_deriv_pos fun z hz =>
           (differentiableAt_of_deriv_ne_zero
@@ -1202,7 +1202,7 @@ interior, then `f` is strictly concave on `D`.
 Note that we don't require twice differentiability explicitly as it already implied by the second
 derivative being strictly negative, except at at most one point. -/
 theorem strictConcaveOn_of_deriv2_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ interior D, (deriv^[2]) f x < 0) :
+    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ interior D, deriv^[2] f x < 0) :
     StrictConcaveOn ℝ D f :=
   ((hD.interior.strictAntiOn_of_deriv_neg fun z hz =>
           (differentiableAt_of_deriv_ne_zero
@@ -1215,7 +1215,7 @@ theorem strictConcaveOn_of_deriv2_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ
 `f''` is nonnegative on `D`, then `f` is convex on `D`. -/
 theorem convexOn_of_deriv2_nonneg' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf' : DifferentiableOn ℝ f D) (hf'' : DifferentiableOn ℝ (deriv f) D)
-    (hf''_nonneg : ∀ x ∈ D, 0 ≤ ((deriv^[2]) f) x) : ConvexOn ℝ D f :=
+    (hf''_nonneg : ∀ x ∈ D, 0 ≤ (deriv^[2] f) x) : ConvexOn ℝ D f :=
   convexOn_of_deriv2_nonneg hD hf'.continuousOn (hf'.mono interior_subset)
     (hf''.mono interior_subset) fun x hx => hf''_nonneg x (interior_subset hx)
 #align convex_on_of_deriv2_nonneg' convexOn_of_deriv2_nonneg'
@@ -1224,7 +1224,7 @@ theorem convexOn_of_deriv2_nonneg' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ 
 `f''` is nonpositive on `D`, then `f` is concave on `D`. -/
 theorem concaveOn_of_deriv2_nonpos' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf' : DifferentiableOn ℝ f D) (hf'' : DifferentiableOn ℝ (deriv f) D)
-    (hf''_nonpos : ∀ x ∈ D, (deriv^[2]) f x ≤ 0) : ConcaveOn ℝ D f :=
+    (hf''_nonpos : ∀ x ∈ D, deriv^[2] f x ≤ 0) : ConcaveOn ℝ D f :=
   concaveOn_of_deriv2_nonpos hD hf'.continuousOn (hf'.mono interior_subset)
     (hf''.mono interior_subset) fun x hx => hf''_nonpos x (interior_subset hx)
 #align concave_on_of_deriv2_nonpos' concaveOn_of_deriv2_nonpos'
@@ -1234,7 +1234,7 @@ then `f` is strictly convex on `D`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
 derivative being strictly positive, except at at most one point. -/
 theorem strictConvexOn_of_deriv2_pos' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ D, 0 < ((deriv^[2]) f) x) : StrictConvexOn ℝ D f :=
+    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ D, 0 < (deriv^[2] f) x) : StrictConvexOn ℝ D f :=
   strictConvexOn_of_deriv2_pos hD hf fun x hx => hf'' x (interior_subset hx)
 #align strict_convex_on_of_deriv2_pos' strictConvexOn_of_deriv2_pos'
 
@@ -1243,14 +1243,14 @@ then `f` is strictly concave on `D`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
 derivative being strictly negative, except at at most one point. -/
 theorem strictConcaveOn_of_deriv2_neg' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
-    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ D, (deriv^[2]) f x < 0) : StrictConcaveOn ℝ D f :=
+    (hf : ContinuousOn f D) (hf'' : ∀ x ∈ D, deriv^[2] f x < 0) : StrictConcaveOn ℝ D f :=
   strictConcaveOn_of_deriv2_neg hD hf fun x hx => hf'' x (interior_subset hx)
 #align strict_concave_on_of_deriv2_neg' strictConcaveOn_of_deriv2_neg'
 
 /-- If a function `f` is twice differentiable on `ℝ`, and `f''` is nonnegative on `ℝ`,
 then `f` is convex on `ℝ`. -/
 theorem convexOn_univ_of_deriv2_nonneg {f : ℝ → ℝ} (hf' : Differentiable ℝ f)
-    (hf'' : Differentiable ℝ (deriv f)) (hf''_nonneg : ∀ x, 0 ≤ ((deriv^[2]) f) x) :
+    (hf'' : Differentiable ℝ (deriv f)) (hf''_nonneg : ∀ x, 0 ≤ (deriv^[2] f) x) :
     ConvexOn ℝ univ f :=
   convexOn_of_deriv2_nonneg' convex_univ hf'.differentiableOn hf''.differentiableOn fun x _ =>
     hf''_nonneg x
@@ -1259,7 +1259,7 @@ theorem convexOn_univ_of_deriv2_nonneg {f : ℝ → ℝ} (hf' : Differentiable 
 /-- If a function `f` is twice differentiable on `ℝ`, and `f''` is nonpositive on `ℝ`,
 then `f` is concave on `ℝ`. -/
 theorem concaveOn_univ_of_deriv2_nonpos {f : ℝ → ℝ} (hf' : Differentiable ℝ f)
-    (hf'' : Differentiable ℝ (deriv f)) (hf''_nonpos : ∀ x, (deriv^[2]) f x ≤ 0) :
+    (hf'' : Differentiable ℝ (deriv f)) (hf''_nonpos : ∀ x, deriv^[2] f x ≤ 0) :
     ConcaveOn ℝ univ f :=
   concaveOn_of_deriv2_nonpos' convex_univ hf'.differentiableOn hf''.differentiableOn fun x _ =>
     hf''_nonpos x
@@ -1270,7 +1270,7 @@ then `f` is strictly convex on `ℝ`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
 derivative being strictly positive, except at at most one point. -/
 theorem strictConvexOn_univ_of_deriv2_pos {f : ℝ → ℝ} (hf : Continuous f)
-    (hf'' : ∀ x, 0 < ((deriv^[2]) f) x) : StrictConvexOn ℝ univ f :=
+    (hf'' : ∀ x, 0 < (deriv^[2] f) x) : StrictConvexOn ℝ univ f :=
   strictConvexOn_of_deriv2_pos' convex_univ hf.continuousOn fun x _ => hf'' x
 #align strict_convex_on_univ_of_deriv2_pos strictConvexOn_univ_of_deriv2_pos
 
@@ -1279,7 +1279,7 @@ then `f` is strictly concave on `ℝ`.
 Note that we don't require twice differentiability explicitly as it is already implied by the second
 derivative being strictly negative, except at at most one point. -/
 theorem strictConcaveOn_univ_of_deriv2_neg {f : ℝ → ℝ} (hf : Continuous f)
-    (hf'' : ∀ x, (deriv^[2]) f x < 0) : StrictConcaveOn ℝ univ f :=
+    (hf'' : ∀ x, deriv^[2] f x < 0) : StrictConcaveOn ℝ univ f :=
   strictConcaveOn_of_deriv2_neg' convex_univ hf.continuousOn fun x _ => hf'' x
 #align strict_concave_on_univ_of_deriv2_neg strictConcaveOn_univ_of_deriv2_neg
 
chore: clean up spacing around at and goals (#5387)

Changes are of the form

  • some_tactic at h⊢ -> some_tactic at h ⊢
  • some_tactic  at h -> some_tactic at h (removing a doubled space)
Diff
@@ -416,7 +416,7 @@ variable {f' g : ℝ → E}
 theorem eq_of_has_deriv_right_eq (derivf : ∀ x ∈ Ico a b, HasDerivWithinAt f (f' x) (Ici x) x)
     (derivg : ∀ x ∈ Ico a b, HasDerivWithinAt g (f' x) (Ici x) x) (fcont : ContinuousOn f (Icc a b))
     (gcont : ContinuousOn g (Icc a b)) (hi : f a = g a) : ∀ y ∈ Icc a b, f y = g y := by
-  simp only [← @sub_eq_zero _ _ (f _)] at hi⊢
+  simp only [← @sub_eq_zero _ _ (f _)] at hi ⊢
   exact hi ▸ constant_of_has_deriv_right_zero (fcont.sub gcont) fun y hy => by
     simpa only [sub_self] using (derivf y hy).sub (derivg y hy)
 #align eq_of_has_deriv_right_eq eq_of_has_deriv_right_eq
@@ -1023,7 +1023,7 @@ theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : C
     ∃ a ∈ Ioo x y, (f y - f x) / (y - x) < deriv f a := by
   by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
   · apply StrictMonoOn.exists_slope_lt_deriv_aux hf hxy hf'_mono h
-  · push_neg  at h
+  · push_neg at h
     rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
     obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ a ∈ Ioo x w, (f w - f x) / (w - x) < deriv f a := by
       apply StrictMonoOn.exists_slope_lt_deriv_aux _ hxw _ _
@@ -1042,7 +1042,7 @@ theorem StrictMonoOn.exists_slope_lt_deriv {x y : ℝ} {f : ℝ → ℝ} (hf : C
         apply ne_of_gt
         exact hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hz.1, hz.2⟩ hz.1
     refine' ⟨b, ⟨hxw.trans hwb, hby⟩, _⟩
-    simp only [div_lt_iff, hxy, hxw, hwy, sub_pos] at ha hb⊢
+    simp only [div_lt_iff, hxy, hxw, hwy, sub_pos] at ha hb ⊢
     have : deriv f a * (w - x) < deriv f b * (w - x) := by
       apply mul_lt_mul _ le_rfl (sub_pos.2 hxw) _
       · exact hf'_mono ⟨hxa, haw.trans hwy⟩ ⟨hxw.trans hwb, hby⟩ (haw.trans hwb)
@@ -1069,7 +1069,7 @@ theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : C
     ∃ a ∈ Ioo x y, deriv f a < (f y - f x) / (y - x) := by
   by_cases h : ∀ w ∈ Ioo x y, deriv f w ≠ 0
   · apply StrictMonoOn.exists_deriv_lt_slope_aux hf hxy hf'_mono h
-  · push_neg  at h
+  · push_neg at h
     rcases h with ⟨w, ⟨hxw, hwy⟩, hw⟩
     obtain ⟨a, ⟨hxa, haw⟩, ha⟩ : ∃ a ∈ Ioo x w, deriv f a < (f w - f x) / (w - x) := by
       apply StrictMonoOn.exists_deriv_lt_slope_aux _ hxw _ _
@@ -1088,7 +1088,7 @@ theorem StrictMonoOn.exists_deriv_lt_slope {x y : ℝ} {f : ℝ → ℝ} (hf : C
         apply ne_of_gt
         exact hf'_mono ⟨hxw, hwy⟩ ⟨hxw.trans hz.1, hz.2⟩ hz.1
     refine' ⟨a, ⟨hxa, haw.trans hwy⟩, _⟩
-    simp only [lt_div_iff, hxy, hxw, hwy, sub_pos] at ha hb⊢
+    simp only [lt_div_iff, hxy, hxw, hwy, sub_pos] at ha hb ⊢
     have : deriv f a * (y - w) < deriv f b * (y - w) := by
       apply mul_lt_mul _ le_rfl (sub_pos.2 hwy) _
       · exact hf'_mono ⟨hxa, haw.trans hwy⟩ ⟨hxw.trans hwb, hby⟩ (haw.trans hwb)
chore: fix grammar 1/3 (#5001)

All of these are doc fixes

Diff
@@ -1211,7 +1211,7 @@ theorem strictConcaveOn_of_deriv2_neg {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ
     hD hf
 #align strict_concave_on_of_deriv2_neg strictConcaveOn_of_deriv2_neg
 
-/-- If a function `f` is twice differentiable on a open convex set `D ⊆ ℝ` and
+/-- If a function `f` is twice differentiable on an open convex set `D ⊆ ℝ` and
 `f''` is nonnegative on `D`, then `f` is convex on `D`. -/
 theorem convexOn_of_deriv2_nonneg' {D : Set ℝ} (hD : Convex ℝ D) {f : ℝ → ℝ}
     (hf' : DifferentiableOn ℝ f D) (hf'' : DifferentiableOn ℝ (deriv f) D)
feat: port Analysis.Calculus.MeanValue (#4249)

Co-authored-by: Floris van Doorn <fpvdoorn@gmail.com> Co-authored-by: Jeremy Tan Jie Rui <reddeloostw@gmail.com>

Dependencies 10 + 763

764 files ported (98.7%)
338662 lines ported (98.5%)

The following dependency has changed in mathlib3 since it was ported, which may complicate porting this file